content_type: stringclasses (8 values)
main_lang: stringclasses (7 values)
message: stringlengths (1 to 50)
sha: stringlengths (40 to 40)
patch: stringlengths (52 to 962k)
file_count: int64 (1 to 300)
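
A minimal Python sketch of a record matching the schema above, with a basic sanity check; the CommitRecord class, the looks_valid helper, and the hex-digest check are illustrative assumptions introduced here, not part of any published loader for this data.

# Illustrative sketch only: field names and bounds follow the schema above;
# the class and the checks are assumptions, not an official loader.
from dataclasses import dataclass


@dataclass
class CommitRecord:
    content_type: str  # one of 8 classes (e.g. "PHP", "Python", "Mixed")
    main_lang: str     # one of 7 classes
    message: str       # commit message, 1 to 50 characters
    sha: str           # 40-character commit hash
    patch: str         # diff text using <ide>/<add>/<del> line markers
    file_count: int    # number of files touched, 1 to 300


def looks_valid(rec: CommitRecord) -> bool:
    """Sanity-check a record against the schema bounds above."""
    return (
        1 <= len(rec.message) <= 50
        and len(rec.sha) == 40
        and all(c in "0123456789abcdef" for c in rec.sha.lower())
        and 1 <= rec.file_count <= 300
    )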

content_type: PHP
main_lang: PHP
message: add integration test for route fluent methods
sha: ebee4628ec72349e685c304bc598b0f47d07f98c
patch:
<ide><path>tests/TestCase/Routing/RouteBuilderTest.php <ide> public function testHttpMethods($method) <ide> $route->defaults <ide> ); <ide> } <add> <add> /** <add> * Integration test for http method helpers and route fluent method <add> * <add> * @return void <add> */ <add> public function testHttpMethodIntegration() <add> { <add> $routes = new RouteBuilder($this->collection, '/'); <add> $routes->scope('/', function ($routes) { <add> $routes->get('/faq/:page', ['controller' => 'Pages', 'action' => 'faq'], 'faq') <add> ->setPatterns(['page' => '[a-z0-9_]+']) <add> ->setHost('docs.example.com'); <add> <add> $routes->post('/articles/:id', ['controller' => 'Articles', 'action' => 'update'], 'article:update') <add> ->setPatterns(['id' => '[0-9]+']) <add> ->setPass(['id']); <add> }); <add> $this->assertCount(2, $this->collection->routes()); <add> $this->assertEquals(['faq', 'article:update'], array_keys($this->collection->named())); <add> $this->assertNotEmpty($this->collection->parse('/faq/things_you_know')); <add> $result = $this->collection->parse('/articles/123'); <add> $this->assertEquals(['123'], $result['pass']); <add> } <ide> }
file_count: 1

content_type: Python
main_lang: Python
message: correct an issue if section did not exist
sha: aea9942361b15e6c071bc05a6b07c13053e4086a
patch:
<ide><path>glances/config.py <ide> from io import open <ide> import re <ide> <del>from glances.compat import ConfigParser, NoOptionError, system_exec <add>from glances.compat import ConfigParser, NoOptionError, NoSectionError, system_exec <ide> from glances.globals import BSD, LINUX, MACOS, SUNOS, WINDOWS <ide> from glances.logger import logger <ide> <ide> def get_value(self, section, option, <ide> ret = default <ide> try: <ide> ret = self.parser.get(section, option) <del> except NoOptionError: <add> except (NoOptionError, NoSectionError): <ide> pass <ide> <ide> # Search a substring `foo` and replace it by the result of its exec <ide> def get_int_value(self, section, option, default=0): <ide> """Get the int value of an option, if it exists.""" <ide> try: <ide> return self.parser.getint(section, option) <del> except NoOptionError: <add> except (NoOptionError, NoSectionError): <ide> return int(default) <ide> <ide> def get_float_value(self, section, option, default=0.0): <ide> """Get the float value of an option, if it exists.""" <ide> try: <ide> return self.parser.getfloat(section, option) <del> except NoOptionError: <add> except (NoOptionError, NoSectionError): <ide> return float(default) <ide> <ide> def get_bool_value(self, section, option, default=True): <ide> """Get the bool value of an option, if it exists.""" <ide> try: <ide> return self.parser.getboolean(section, option) <del> except NoOptionError: <add> except (NoOptionError, NoSectionError): <ide> return bool(default) <ide><path>glances/plugins/glances_diskio.py <ide> def __init__(self, args=None, config=None): <ide> self.display_curse = True <ide> # Hide stats if it has never been != 0 <ide> self.hide_zero = config.get_bool_value( <del> self.plugin_name, 'hide_zero', default=False) <add> self.plugin_name + 'XXX', 'hide_zero', default=False) <ide> self.hide_zero_fields = ['read_bytes', 'write_bytes'] <ide> <ide> def get_key(self):
file_count: 2

content_type: Python
main_lang: Python
message: fix broken test
sha: 3d8ee16fde083e712e70b0a8066924e044c8dbe5
patch:
<ide><path>libcloud/test/storage/test_aurora.py <ide> # See the License for the specific language governing permissions and <ide> # limitations under the License. <ide> <add>import sys <ide> import unittest <ide> <ide> from libcloud.storage.drivers.auroraobjects import AuroraObjectsStorageDriver <ide> class AuroraObjectsTests(S3Tests, unittest.TestCase): <ide> driver_type = AuroraObjectsStorageDriver <ide> <ide> def setUp(self): <add> super(AuroraObjectsTests, self).setUp() <add> <ide> AuroraObjectsStorageDriver.connectionCls.conn_class = S3MockHttp <ide> S3MockHttp.type = None <ide> self.driver = self.create_driver() <add> <add> <add>if __name__ == '__main__': <add> sys.exit(unittest.main())
file_count: 1

content_type: Python
main_lang: Python
message: fix printing of complicated dtypes
sha: 984bd6ce8c464f921d273898a2546506778986a7
patch:
<ide><path>numpy/core/_internal.py <ide> def _usefields(adict, align): <ide> def _array_descr(descriptor): <ide> fields = descriptor.fields <ide> if fields is None: <del> return descriptor.str <add> subdtype = descriptor.subdtype <add> if subdtype is None: <add> return descriptor.str <add> else: <add> return (_array_descr(subdtype[0]), subdtype[1]) <add> <ide> <ide> names = descriptor.names <ide> ordered_fields = [fields[x] + (x,) for x in names]
file_count: 1

content_type: Mixed
main_lang: Javascript
message: handle runtime errors (#268)
sha: c7ba914f527ebcfe084cb0e36946b9dee00d25fa
patch:
<ide><path>README.md <ide> import React from 'react' <ide> <ide> export default class Error extends React.Component { <ide> static getInitialProps ({ res, xhr }) { <del> const statusCode = res ? res.statusCode : xhr.status <add> const statusCode = res ? res.statusCode : (xhr ? xhr.status : null) <ide> return { statusCode } <ide> } <ide> <ide> render () { <ide> return ( <del> <p>An error { this.props.statusCode } occurred</p> <add> <p>{ <add> this.props.statusCode <add> ? `An error ${this.props.statusCode} occurred on server` <add> : 'An error occurred on client' <add> ]</p> <ide> ) <ide> } <ide> } <ide><path>client/next.js <ide> import App from '../lib/app' <ide> import evalScript from '../lib/eval-script' <ide> <ide> const { <del> __NEXT_DATA__: { component, props, ids, err } <add> __NEXT_DATA__: { component, errorComponent, props, ids, err } <ide> } = window <ide> <ide> const Component = evalScript(component).default <add>const ErrorComponent = evalScript(errorComponent).default <ide> <del>export const router = new Router(window.location.href, { Component, ctx: { err } }) <add>export const router = new Router(window.location.href, { <add> Component, <add> ErrorComponent, <add> ctx: { err } <add>}) <ide> <ide> const headManager = new HeadManager() <ide> const container = document.getElementById('__next') <ide><path>client/webpack-hot-middleware-client.js <ide> const handlers = { <ide> } <ide> <ide> next.router.reload(route) <add> }, <add> change (route) { <add> const { Component } = next.router.components[route] || {} <add> if (Component && Component.__route === '/_error-debug') { <add> // reload to recover from runtime errors <add> next.router.reload(route) <add> } <ide> } <ide> } <ide> <ide><path>lib/router.js <ide> import evalScript from './eval-script' <ide> import shallowEquals from './shallow-equals' <ide> <ide> export default class Router { <del> constructor (url, initialData) { <add> constructor (url, { Component, ErrorComponent, ctx } = {}) { <ide> const parsed = parse(url, true) <ide> <ide> // represents the current component key <ide> this.route = toRoute(parsed.pathname) <ide> <ide> // set up the component cache (by route keys) <del> this.components = { [this.route]: initialData } <add> this.components = { [this.route]: { Component, ctx } } <ide> <add> this.ErrorComponent = ErrorComponent <ide> this.pathname = parsed.pathname <ide> this.query = parsed.query <ide> this.subscriptions = new Set() <ide> export default class Router { <ide> this.route = route <ide> this.set(getURL(), { ...data, props }) <ide> }) <del> .catch((err) => { <add> .catch(async (err) => { <ide> if (err.cancelled) return <ide> <del> // the only way we can appropriately handle <del> // this failure is deferring to the browser <del> // since the URL has already changed <del> window.location.reload() <add> const data = { Component: this.ErrorComponent, ctx: { err } } <add> const ctx = { ...data.ctx, pathname, query } <add> const props = await this.getInitialProps(data.Component, ctx) <add> <add> this.route = route <add> this.set(getURL(), { ...data, props }) <add> console.error(err) <add> }) <add> .catch((err) => { <add> console.error(err) <ide> }) <ide> } <ide> <ide> export default class Router { <ide> <ide> let data <ide> let props <add> let _err <ide> try { <ide> data = await this.fetchComponent(route) <ide> const ctx = { ...data.ctx, pathname, query } <ide> props = await this.getInitialProps(data.Component, ctx) <ide> } catch (err) { <ide> if (err.cancelled) return false <del> throw err <add> <add> data = 
{ Component: this.ErrorComponent, ctx: { err } } <add> const ctx = { ...data.ctx, pathname, query } <add> props = await this.getInitialProps(data.Component, ctx) <add> <add> _err = err <add> console.error(err) <ide> } <ide> <ide> this.notify({ ...data, props }) <add> <add> if (_err) throw _err <ide> } <ide> <ide> back () { <ide> export default class Router { <ide> <ide> let data <ide> let props <add> let _err <ide> try { <ide> data = await this.fetchComponent(route) <ide> const ctx = { ...data.ctx, pathname, query } <ide> props = await this.getInitialProps(data.Component, ctx) <ide> } catch (err) { <ide> if (err.cancelled) return false <del> throw err <add> <add> data = { Component: this.ErrorComponent, ctx: { err } } <add> const ctx = { ...data.ctx, pathname, query } <add> props = await this.getInitialProps(data.Component, ctx) <add> <add> _err = err <add> console.error(err) <ide> } <ide> <ide> if (getURL() !== url) { <ide> export default class Router { <ide> <ide> this.route = route <ide> this.set(url, { ...data, props }) <add> <add> if (_err) throw _err <add> <ide> return true <ide> } <ide> <ide><path>pages/_error-debug.js <ide> import style from 'next/css' <ide> <ide> export default class ErrorDebug extends React.Component { <ide> static getInitialProps ({ err }) { <del> const { message, module } = err <del> return { message, path: module.rawRequest } <add> const { name, message, stack, module } = err <add> return { name, message, stack, path: module ? module.rawRequest : null } <ide> } <ide> <ide> render () { <del> const { message, path } = this.props <add> const { name, message, stack, path } = this.props <ide> <ide> return <div className={styles.errorDebug}> <ide> <Head> <ide> export default class ErrorDebug extends React.Component { <ide> } <ide> `}} /> <ide> </Head> <del> <div className={styles.heading}>Error in {path}</div> <del> <pre className={styles.message} dangerouslySetInnerHTML={{ __html: ansiHTML(encodeHtml(message)) }} /> <add> {path ? <div className={styles.heading}>Error in {path}</div> : null} <add> { <add> name === 'ModuleBuildError' <add> ? <pre className={styles.message} dangerouslySetInnerHTML={{ __html: ansiHTML(encodeHtml(message)) }} /> <add> : <pre className={styles.message}>{stack}</pre> <add> } <ide> </div> <ide> } <ide> } <ide><path>pages/_error.js <ide> import React from 'react' <del>import style, { merge } from 'next/css' <add>import style from 'next/css' <ide> <ide> export default class Error extends React.Component { <ide> static getInitialProps ({ res, xhr }) { <del> const statusCode = res ? res.statusCode : xhr.status <add> const statusCode = res ? res.statusCode : (xhr ? xhr.status : null) <ide> return { statusCode } <ide> } <ide> <ide> render () { <ide> const { statusCode } = this.props <del> const title = statusCode === 404 ? 'This page could not be found' : 'Internal Server Error' <add> const title = statusCode === 404 <add> ? 'This page could not be found' <add> : (statusCode ? 'Internal Server Error' : 'An unexpected error has occurred') <ide> <del> return <div className={merge(styles.error, styles['error_' + statusCode])}> <add> return <div className={styles.error}> <ide> <div className={styles.text}> <del> <h1 className={styles.h1}>{statusCode}</h1> <add> {statusCode ? 
<h1 className={styles.h1}>{statusCode}</h1> : null} <ide> <div className={styles.desc}> <ide> <h2 className={styles.h2}>{title}.</h2> <ide> </div> <ide><path>server/hot-reloader.js <ide> export default class HotReloader { <ide> this.prevAssets = null <ide> this.prevChunkNames = null <ide> this.prevFailedChunkNames = null <add> this.prevChunkHashes = null <ide> } <ide> <ide> async run (req, res) { <ide> export default class HotReloader { <ide> .reduce((a, b) => a.concat(b), []) <ide> .map((c) => c.name)) <ide> <add> const chunkHashes = new Map(compilation.chunks.map((c) => [c.name, c.hash])) <add> <ide> if (this.initialized) { <ide> // detect chunks which have to be replaced with a new template <ide> // e.g, pages/index.js <-> pages/_error.js <ide> export default class HotReloader { <ide> const route = toRoute(relative(rootDir, n)) <ide> this.send('reload', route) <ide> } <add> <add> for (const [n, hash] of chunkHashes) { <add> if (!this.prevChunkHashes.has(n)) continue <add> if (this.prevChunkHashes.get(n) === hash) continue <add> <add> const route = toRoute(relative(rootDir, n)) <add> <add> // notify change to recover from runtime errors <add> this.send('change', route) <add> } <ide> } <ide> <ide> this.initialized = true <ide> this.stats = stats <ide> this.compilationErrors = null <ide> this.prevChunkNames = chunkNames <ide> this.prevFailedChunkNames = failedChunkNames <add> this.prevChunkHashes = chunkHashes <ide> }) <ide> <ide> this.webpackDevMiddleware = webpackDevMiddleware(compiler, { <ide><path>server/index.js <ide> export default class Server { <ide> this.run(req, res) <ide> .catch((err) => { <ide> console.error(err) <del> res.status(500) <add> res.statusCode = 500 <ide> res.end('error') <ide> }) <ide> }) <ide> export default class Server { <ide> } <ide> <ide> async render (req, res) { <del> const { dir, dev } = this <ide> const { pathname, query } = parse(req.url, true) <ide> const ctx = { req, res, pathname, query } <del> const opts = { dir, dev } <ide> <del> let html <add> const compilationErr = this.getCompilationError(req.url) <add> if (compilationErr) { <add> await this.doRender(res, 500, '/_error-debug', { ...ctx, err: compilationErr }) <add> return <add> } <add> <add> try { <add> await this.doRender(res, 200, req.url, ctx) <add> } catch (err) { <add> const compilationErr2 = this.getCompilationError('/_error') <add> if (compilationErr2) { <add> await this.doRender(res, 500, '/_error-debug', { ...ctx, err: compilationErr2 }) <add> return <add> } <add> <add> if (err.code !== 'ENOENT') { <add> console.error(err) <add> const url = this.dev ? 
'/_error-debug' : '/_error' <add> await this.doRender(res, 500, url, { ...ctx, err }) <add> return <add> } <ide> <del> const err = this.getCompilationError(req.url) <del> if (err) { <del> res.statusCode = 500 <del> html = await render('/_error-debug', { ...ctx, err }, opts) <del> } else { <ide> try { <del> html = await render(req.url, ctx, opts) <del> } catch (err) { <del> const _err = this.getCompilationError('/_error') <del> if (_err) { <del> res.statusCode = 500 <del> html = await render('/_error-debug', { ...ctx, err: _err }, opts) <add> await this.doRender(res, 404, '/_error', { ...ctx, err }) <add> } catch (err2) { <add> if (this.dev) { <add> await this.doRender(res, 500, '/_error-debug', { ...ctx, err: err2 }) <ide> } else { <del> if (err.code === 'ENOENT') { <del> res.statusCode = 404 <del> } else { <del> console.error(err) <del> res.statusCode = 500 <del> } <del> html = await render('/_error', { ...ctx, err }, opts) <add> throw err2 <ide> } <ide> } <ide> } <add> } <add> <add> async doRender (res, statusCode, url, ctx) { <add> const { dir, dev } = this <add> <add> // need to set statusCode before `render` <add> // since it can be used on getInitialProps <add> res.statusCode = statusCode <ide> <add> const html = await render(url, ctx, { dir, dev }) <ide> sendHTML(res, html) <ide> } <ide> <ide> async renderJSON (req, res) { <del> const { dir } = this <del> const opts = { dir } <add> const compilationErr = this.getCompilationError(req.url) <add> if (compilationErr) { <add> await this.doRenderJSON(res, 500, '/_error-debug.json', compilationErr) <add> return <add> } <ide> <del> let json <add> try { <add> await this.doRenderJSON(res, 200, req.url) <add> } catch (err) { <add> const compilationErr2 = this.getCompilationError('/_error.json') <add> if (compilationErr2) { <add> await this.doRenderJSON(res, 500, '/_error-debug.json', compilationErr2) <add> return <add> } <ide> <del> const err = this.getCompilationError(req.url) <del> if (err) { <del> res.statusCode = 500 <del> json = await renderJSON('/_error-debug.json', opts) <del> json = { ...json, err: errorToJSON(err) } <del> } else { <del> try { <del> json = await renderJSON(req.url, opts) <del> } catch (err) { <del> const _err = this.getCompilationError('/_error.json') <del> if (_err) { <del> res.statusCode = 500 <del> json = await renderJSON('/_error-debug.json', opts) <del> json = { ...json, err: errorToJSON(_err) } <del> } else { <del> if (err.code === 'ENOENT') { <del> res.statusCode = 404 <del> } else { <del> console.error(err) <del> res.statusCode = 500 <del> } <del> json = await renderJSON('/_error.json', opts) <del> } <add> if (err.code === 'ENOENT') { <add> await this.doRenderJSON(res, 404, '/_error.json') <add> } else { <add> console.error(err) <add> await this.doRenderJSON(res, 500, '/_error.json') <ide> } <ide> } <add> } <add> <add> async doRenderJSON (res, statusCode, url, err) { <add> const { dir } = this <add> const json = await renderJSON(url, { dir }) <add> if (err) { <add> json.err = errorToJSON(err) <add> } <ide> <ide> const data = JSON.stringify(json) <ide> res.setHeader('Content-Type', 'application/json') <ide> res.setHeader('Content-Length', Buffer.byteLength(data)) <add> res.statusCode = statusCode <ide> res.end(data) <ide> } <ide> <ide> async render404 (req, res) { <del> const { dir, dev } = this <ide> const { pathname, query } = parse(req.url, true) <ide> const ctx = { req, res, pathname, query } <del> const opts = { dir, dev } <ide> <del> let html <del> <del> const err = this.getCompilationError('/_error') 
<del> if (err) { <del> res.statusCode = 500 <del> html = await render('/_error-debug', { ...ctx, err }, opts) <del> } else { <del> res.statusCode = 404 <del> html = await render('/_error', ctx, opts) <add> const compilationErr = this.getCompilationError('/_error') <add> if (compilationErr) { <add> await this.doRender(res, 500, '/_error-debug', { ...ctx, err: compilationErr }) <add> return <ide> } <ide> <del> sendHTML(res, html) <add> try { <add> await this.doRender(res, 404, '/_error', ctx) <add> } catch (err) { <add> if (this.dev) { <add> await this.doRender(res, 500, '/_error-debug', { ...ctx, err }) <add> } else { <add> throw err <add> } <add> } <ide> } <ide> <ide> serveStatic (req, res, path) { <ide><path>server/render.js <ide> export async function render (url, ctx = {}, { <ide> const mod = await requireModule(join(dir, '.next', 'dist', 'pages', path)) <ide> const Component = mod.default || mod <ide> <del> const props = await (Component.getInitialProps ? Component.getInitialProps(ctx) : {}) <del> const component = await read(join(dir, '.next', 'bundles', 'pages', path)) <add> const [ <add> props, <add> component, <add> errorComponent <add> ] = await Promise.all([ <add> Component.getInitialProps ? Component.getInitialProps(ctx) : {}, <add> read(join(dir, '.next', 'bundles', 'pages', path)), <add> read(join(dir, '.next', 'bundles', 'pages', dev ? '_error-debug' : '_error')) <add> ]) <ide> <ide> const { html, css, ids } = renderStatic(() => { <ide> const app = createElement(App, { <ide> export async function render (url, ctx = {}, { <ide> css, <ide> data: { <ide> component, <add> errorComponent, <ide> props, <ide> ids: ids, <ide> err: (ctx.err && dev) ? errorToJSON(ctx.err) : null
file_count: 9

content_type: Javascript
main_lang: Javascript
message: use proper clearcoat variable
sha: b5c1e4d96e962e09646ab29e507a3bb1a2b23725
patch:
<ide><path>src/renderers/shaders/ShaderLib/meshphysical.glsl.js <ide> void main() { <ide> <ide> vec3 Fcc = F_Schlick( material.clearcoatF0, material.clearcoatF90, dotNVcc ); <ide> <del> outgoingLight = outgoingLight * ( 1.0 - clearcoat * Fcc ) + clearcoatSpecular * clearcoat; <add> outgoingLight = outgoingLight * ( 1.0 - material.clearcoat * Fcc ) + clearcoatSpecular * material.clearcoat; <ide> <ide> #endif <ide>
file_count: 1

content_type: Python
main_lang: Python
message: fix coercion of unicode object to chararray
sha: 1857cf27034c639a27044308c39211a811678054
patch:
<ide><path>numpy/core/defchararray.py <ide> class adds the following functionality: <ide> ucs4 = numpy.array(ucs2, 'u4') <ide> obj = ucs4.data <ide> else: <del> obj = unicode(obj) <add> obj = _unicode(obj) <ide> else: <ide> # Let the default Unicode -> string encoding (if any) take <ide> # precedence.
file_count: 1

content_type: Java
main_lang: Java
message: add photoviewer component
sha: d403ac6a3147784787fb0602ed62ef37cd65fd71
patch:
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/image/ImageLoadEvent.java <ide> <ide> package com.facebook.react.views.image; <ide> <add>import javax.annotation.Nullable; <add> <ide> import android.support.annotation.IntDef; <ide> <add>import com.facebook.react.bridge.Arguments; <add>import com.facebook.react.bridge.WritableMap; <ide> import com.facebook.react.uimanager.events.Event; <ide> import com.facebook.react.uimanager.events.RCTEventEmitter; <ide> <ide> public class ImageLoadEvent extends Event<ImageLoadEvent> { <ide> public static final int ON_PROGRESS = 5; <ide> <ide> private final int mEventType; <add> private final @Nullable String mImageUri; <ide> <ide> public ImageLoadEvent(int viewId, long timestampMs, @ImageEventType int eventType) { <add> this(viewId, timestampMs, eventType, null); <add> } <add> <add> public ImageLoadEvent( <add> int viewId, <add> long timestampMs, <add> @ImageEventType int eventType, <add> @Nullable String imageUri) { <ide> super(viewId, timestampMs); <ide> mEventType = eventType; <add> mImageUri = imageUri; <ide> } <ide> <ide> public static String eventNameForType(@ImageEventType int eventType) { <ide> public short getCoalescingKey() { <ide> <ide> @Override <ide> public void dispatch(RCTEventEmitter rctEventEmitter) { <del> rctEventEmitter.receiveEvent(getViewTag(), getEventName(), null); <add> WritableMap eventData = null; <add> if (mImageUri != null) { <add> eventData = Arguments.createMap(); <add> eventData.putString("uri", mImageUri); <add> } <add> rctEventEmitter.receiveEvent(getViewTag(), getEventName(), eventData); <ide> } <ide> }
file_count: 1

content_type: Ruby
main_lang: Ruby
message: return delete result instead arel object
sha: 324f265c1de98212f59f42c287d441b85b2350b7
patch:
<ide><path>lib/arel/session.rb <ide> def read(select) <ide> <ide> def update(update) <ide> update.call <del> update <ide> end <ide> <ide> def delete(delete) <ide> delete.call <del> delete <ide> end <ide> end <ide> include CRUD
file_count: 1

content_type: Javascript
main_lang: Javascript
message: change calls to deprecated util.print()
sha: 306936e98fa6c06b52095f5a369a03c6e722b285
patch:
<ide><path>test/fixtures/net-fd-passing-receiver.js <ide> receiver = net.createServer(function(socket) { <ide> <ide> /* To signal the test runne we're up and listening */ <ide> receiver.on('listening', function() { <del> common.print('ready'); <add> console.log('ready'); <ide> }); <ide> <ide> receiver.listen(path); <ide><path>test/pummel/test-net-many-clients.js <ide> for (var i = 0; i < bytes; i++) { <ide> var server = net.createServer(function(c) { <ide> console.log('connected'); <ide> total_connections++; <del> common.print('#'); <add> console.log('#'); <ide> c.write(body); <ide> c.end(); <ide> }); <ide> function runClient(callback) { <ide> client.setEncoding('utf8'); <ide> <ide> client.on('connect', function() { <del> common.print('c'); <add> console.log('c'); <ide> client.recved = ''; <ide> client.connections += 1; <ide> }); <ide> function runClient(callback) { <ide> }); <ide> <ide> client.on('close', function(had_error) { <del> common.print('.'); <add> console.log('.'); <ide> assert.equal(false, had_error); <ide> assert.equal(bytes, client.recved.length); <ide> <ide><path>test/pummel/test-net-pause.js <ide> server.on('listening', function() { <ide> var client = net.createConnection(common.PORT); <ide> client.setEncoding('ascii'); <ide> client.on('data', function(d) { <del> common.print(d); <add> console.log(d); <ide> recv += d; <ide> }); <ide> <ide><path>test/sequential/test-stdout-to-file.js <ide> function test(size, useBuffer, cb) { <ide> fs.unlinkSync(tmpFile); <ide> } catch (e) {} <ide> <del> common.print(size + ' chars to ' + tmpFile + '...'); <add> console.log(size + ' chars to ' + tmpFile + '...'); <ide> <ide> childProcess.exec(cmd, function(err) { <ide> if (err) throw err;
file_count: 4

content_type: Java
main_lang: Java
message: remove xresource classes
sha: 68a9a66d27386e0fce7196f2797446676f1206a6
patch:
<ide><path>src/main/java/io/reactivex/Completable.java <ide> public interface CompletableTransformer extends Function<Completable, Completabl <ide> * @return the source or its wrapper Completable <ide> * @throws NullPointerException if source is null <ide> */ <del> static Completable wrap(CompletableConsumable source) { <add> public static Completable wrap(CompletableConsumable source) { <ide> Objects.requireNonNull(source, "source is null"); <ide> if (source instanceof Completable) { <ide> return (Completable)source; <ide> public static Completable concat(Publisher<? extends CompletableConsumable> sour <ide> * when the Completable is subscribed to. <ide> * @return the created Completable instance <ide> * @throws NullPointerException if onSubscribe is null <del> * @deprecated <ide> */ <ide> @SchedulerSupport(SchedulerSupport.NONE) <del> @Deprecated // FIXME temporary <ide> public static Completable create(CompletableConsumable onSubscribe) { <ide> Objects.requireNonNull(onSubscribe, "onSubscribe is null"); <ide> if (onSubscribe instanceof Completable) { <ide> public final Throwable get(long timeout, TimeUnit unit) { <ide> * @throws NullPointerException if onLift is null <ide> */ <ide> @SchedulerSupport(SchedulerSupport.NONE) <del> @Deprecated // FIXME temporary <ide> public final Completable lift(final CompletableOperator onLift) { <ide> Objects.requireNonNull(onLift, "onLift is null"); <ide> return new CompletableLift(this, onLift); <ide><path>src/main/java/io/reactivex/disposables/CompositeDisposable.java <ide> import java.util.*; <ide> <ide> import io.reactivex.exceptions.CompositeException; <add>import io.reactivex.internal.disposables.DisposableContainer; <ide> import io.reactivex.internal.functions.Objects; <ide> import io.reactivex.internal.util.*; <ide> <ide> /** <ide> * A disposable container that can hold onto multiple other disposables. <ide> */ <del>public final class CompositeDisposable implements Disposable { <add>public final class CompositeDisposable implements Disposable, DisposableContainer { <ide> <ide> OpenHashSet<Disposable> resources; <ide> <ide> public void clear() { <ide> dispose(set); <ide> } <ide> <add> public int size() { <add> if (disposed) { <add> return 0; <add> } <add> synchronized (this) { <add> if (disposed) { <add> return 0; <add> } <add> return resources.size(); <add> } <add> } <add> <ide> void dispose(OpenHashSet<Disposable> set) { <ide> if (set == null) { <ide> return; <ide><path>src/main/java/io/reactivex/disposables/Disposables.java <ide> <ide> package io.reactivex.disposables; <ide> <del>import io.reactivex.functions.Consumer; <del>import io.reactivex.internal.disposables.EmptyDisposable; <del>import io.reactivex.internal.functions.Functions; <ide> import java.util.concurrent.Future; <add> <ide> import org.reactivestreams.Subscription; <ide> <add>import io.reactivex.internal.disposables.EmptyDisposable; <add>import io.reactivex.internal.functions.Functions; <add> <ide> /** <ide> * Utility class to help create disposables by wrapping <ide> * other types. <ide> public static Disposable empty() { <ide> public static Disposable disposed() { <ide> return EmptyDisposable.INSTANCE; <ide> } <del> <del> private static final Consumer<Disposable> DISPOSER = new Consumer<Disposable>() { <del> @Override <del> public void accept(Disposable d) { <del> d.dispose(); <del> } <del> }; <del> <del> /** <del> * Returns a consumer that calls dispose on the received Disposable. <del> * @return the consumer that calls dispose on the received Disposable. 
<del> * @deprecated that generic resource management will be removed <del> */ <del> @Deprecated <del> public static Consumer<Disposable> consumeAndDispose() { <del> return DISPOSER; <del> } <ide> } <ide><path>src/main/java/io/reactivex/disposables/FutureDisposable.java <ide> import java.util.concurrent.Future; <ide> <ide> final class FutureDisposable extends ReferenceDisposable<Future<?>> { <add> /** */ <add> private static final long serialVersionUID = 6545242830671168775L; <add> <ide> private final boolean allowInterrupt; <ide> <ide> FutureDisposable(Future<?> run, boolean allowInterrupt) { <ide><path>src/main/java/io/reactivex/disposables/RefCountDisposable.java <ide> public boolean isDisposed() { <ide> } <ide> <ide> static final class InnerDisposable extends ReferenceDisposable<RefCountDisposable> { <add> /** */ <add> private static final long serialVersionUID = -6066815451193282256L; <add> <ide> InnerDisposable(RefCountDisposable parent) { <ide> super(parent); <ide> } <ide><path>src/main/java/io/reactivex/disposables/ReferenceDisposable.java <ide> import java.util.concurrent.atomic.AtomicReference; <ide> <ide> abstract class ReferenceDisposable<T> extends AtomicReference<T> implements Disposable { <add> /** */ <add> private static final long serialVersionUID = 6537757548749041217L; <add> <ide> ReferenceDisposable(T value) { <ide> super(Objects.requireNonNull(value, "value is null")); <ide> } <ide><path>src/main/java/io/reactivex/disposables/RunnableDisposable.java <ide> package io.reactivex.disposables; <ide> <ide> final class RunnableDisposable extends ReferenceDisposable<Runnable> { <add> /** */ <add> private static final long serialVersionUID = -8219729196779211169L; <add> <ide> RunnableDisposable(Runnable value) { <ide> super(value); <ide> } <ide><path>src/main/java/io/reactivex/disposables/SubscriptionDisposable.java <ide> import org.reactivestreams.Subscription; <ide> <ide> final class SubscriptionDisposable extends ReferenceDisposable<Subscription> { <add> /** */ <add> private static final long serialVersionUID = -707001650852963139L; <add> <ide> SubscriptionDisposable(Subscription value) { <ide> super(value); <ide> } <add><path>src/main/java/io/reactivex/internal/disposables/ArrayCompositeDisposable.java <del><path>src/main/java/io/reactivex/internal/disposables/ArrayCompositeResource.java <ide> import java.util.concurrent.atomic.AtomicReferenceArray; <ide> <ide> import io.reactivex.disposables.Disposable; <del>import io.reactivex.functions.Consumer; <ide> <ide> /** <del> * A composite resource with a fixed number of slots. <add> * A composite disposable with a fixed number of slots. <ide> * <ide> * <p>Note that since the implementation leaks the methods of AtomicReferenceArray, one must be <ide> * careful to only call setResource, replaceResource and dispose on it. All other methods may lead to undefined behavior <ide> * and should be used by internal means only. <del> * <del> * @param <T> the resource tpye <del> * @deprecated Use more type-specific and inlined resource management <ide> */ <del>@Deprecated <del>public final class ArrayCompositeResource<T> extends AtomicReferenceArray<Object> implements Disposable { <add>public final class ArrayCompositeDisposable extends AtomicReferenceArray<Disposable> implements Disposable { <ide> /** */ <ide> private static final long serialVersionUID = 2746389416410565408L; <ide> <del> final Consumer<? 
super T> disposer; <del> <del> static final Object DISPOSED = new Object(); <del> <del> public ArrayCompositeResource(int capacity, Consumer<? super T> disposer) { <add> public ArrayCompositeDisposable(int capacity) { <ide> super(capacity); <del> this.disposer = disposer; <ide> } <ide> <ide> /** <ide> public ArrayCompositeResource(int capacity, Consumer<? super T> disposer) { <ide> * @param resource <ide> * @return true if the resource has ben set, false if the composite has been disposed <ide> */ <del> @SuppressWarnings("unchecked") <del> public boolean setResource(int index, T resource) { <add> public boolean setResource(int index, Disposable resource) { <ide> for (;;) { <del> Object o = get(index); <del> if (o == DISPOSED) { <del> disposer.accept(resource); <add> Disposable o = get(index); <add> if (o == DisposableHelper.DISPOSED) { <add> resource.dispose();; <ide> return false; <ide> } <ide> if (compareAndSet(index, o, resource)) { <ide> if (o != null) { <del> disposer.accept((T)o); <add> o.dispose(); <ide> } <ide> return true; <ide> } <ide> public boolean setResource(int index, T resource) { <ide> * @param resource <ide> * @return the old resource, can be null <ide> */ <del> @SuppressWarnings("unchecked") <del> public T replaceResource(int index, T resource) { <add> public Disposable replaceResource(int index, Disposable resource) { <ide> for (;;) { <del> Object o = get(index); <del> if (o == DISPOSED) { <del> disposer.accept(resource); <add> Disposable o = get(index); <add> if (o == DisposableHelper.DISPOSED) { <add> resource.dispose(); <ide> return null; <ide> } <ide> if (compareAndSet(index, o, resource)) { <del> return (T)o; <add> return o; <ide> } <ide> } <ide> } <ide> <ide> @Override <del> @SuppressWarnings("unchecked") <ide> public void dispose() { <del> if (get(0) != DISPOSED) { <add> if (get(0) != DisposableHelper.DISPOSED) { <ide> int s = length(); <ide> for (int i = 0; i < s; i++) { <del> Object o = get(i); <del> if (o != DISPOSED) { <del> o = getAndSet(i, DISPOSED); <del> if (o != DISPOSED && o != null) { <del> disposer.accept((T)o); <add> Disposable o = get(i); <add> if (o != DisposableHelper.DISPOSED) { <add> o = getAndSet(i, DisposableHelper.DISPOSED); <add> if (o != DisposableHelper.DISPOSED && o != null) { <add> o.dispose(); <ide> } <ide> } <ide> } <ide> public void dispose() { <ide> <ide> @Override <ide> public boolean isDisposed() { <del> return get(0) == DISPOSED; <add> return get(0) == DisposableHelper.DISPOSED; <ide> } <ide> } <add><path>src/main/java/io/reactivex/internal/disposables/DisposableContainer.java <del><path>src/main/java/io/reactivex/internal/disposables/CompositeResource.java <ide> <ide> package io.reactivex.internal.disposables; <ide> <del>public interface CompositeResource<T> { <add>import io.reactivex.disposables.Disposable; <add> <add>/** <add> * Common interface to add and remove disposables from a container. <add> */ <add>public interface DisposableContainer { <ide> <del> boolean add(T resource); <add> boolean add(Disposable d); <ide> <del> boolean remove(T resource); <add> boolean remove(Disposable d); <ide> <del> boolean delete(T resource); <add> boolean delete(Disposable d); <ide> } <ide><path>src/main/java/io/reactivex/internal/disposables/ListCompositeDisposable.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. 
You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add>package io.reactivex.internal.disposables; <add> <add>import java.util.*; <add> <add>import io.reactivex.disposables.*; <add>import io.reactivex.exceptions.CompositeException; <add>import io.reactivex.internal.functions.Objects; <add>import io.reactivex.internal.util.*; <add> <add>/** <add> * A disposable container that can hold onto multiple other disposables. <add> */ <add>public final class ListCompositeDisposable implements Disposable, DisposableContainer { <add> <add> List<Disposable> resources; <add> <add> volatile boolean disposed; <add> <add> public ListCompositeDisposable() { <add> } <add> <add> public ListCompositeDisposable(Disposable... resources) { <add> Objects.requireNonNull(resources, "resources is null"); <add> this.resources = new LinkedList<Disposable>(); <add> for (Disposable d : resources) { <add> Objects.requireNonNull(d, "Disposable item is null"); <add> this.resources.add(d); <add> } <add> } <add> <add> public ListCompositeDisposable(Iterable<? extends Disposable> resources) { <add> Objects.requireNonNull(resources, "resources is null"); <add> for (Disposable d : resources) { <add> Objects.requireNonNull(d, "Disposable item is null"); <add> this.resources.add(d); <add> } <add> } <add> <add> @Override <add> public void dispose() { <add> if (disposed) { <add> return; <add> } <add> List<Disposable> set; <add> synchronized (this) { <add> if (disposed) { <add> return; <add> } <add> disposed = true; <add> set = resources; <add> resources = null; <add> } <add> <add> dispose(set); <add> } <add> <add> @Override <add> public boolean isDisposed() { <add> return disposed; <add> } <add> <add> public boolean add(Disposable d) { <add> Objects.requireNonNull(d, "d is null"); <add> if (!disposed) { <add> synchronized (this) { <add> if (!disposed) { <add> List<Disposable> set = resources; <add> if (set == null) { <add> set = new LinkedList<Disposable>(); <add> resources = set; <add> } <add> set.add(d); <add> return true; <add> } <add> } <add> } <add> d.dispose(); <add> return false; <add> } <add> <add> public boolean addAll(Disposable... 
ds) { <add> Objects.requireNonNull(ds, "ds is null"); <add> if (!disposed) { <add> synchronized (this) { <add> if (!disposed) { <add> List<Disposable> set = resources; <add> if (set == null) { <add> set = new LinkedList<Disposable>(); <add> resources = set; <add> } <add> for (Disposable d : ds) { <add> Objects.requireNonNull(d, "d is null"); <add> set.add(d); <add> } <add> return true; <add> } <add> } <add> } <add> for (Disposable d : ds) { <add> d.dispose(); <add> } <add> return false; <add> } <add> <add> public boolean remove(Disposable d) { <add> if (delete(d)) { <add> d.dispose(); <add> return true; <add> } <add> return false; <add> } <add> <add> public boolean delete(Disposable d) { <add> Objects.requireNonNull(d, "Disposable item is null"); <add> if (disposed) { <add> return false; <add> } <add> synchronized (this) { <add> if (disposed) { <add> return false; <add> } <add> <add> List<Disposable> set = resources; <add> if (set == null || !set.remove(d)) { <add> return false; <add> } <add> } <add> return true; <add> } <add> <add> public void clear() { <add> if (disposed) { <add> return; <add> } <add> List<Disposable> set; <add> synchronized (this) { <add> if (disposed) { <add> return; <add> } <add> <add> set = resources; <add> resources = null; <add> } <add> <add> dispose(set); <add> } <add> <add> void dispose(List<Disposable> set) { <add> if (set == null) { <add> return; <add> } <add> List<Throwable> errors = null; <add> for (Disposable o : set) { <add> try { <add> o.dispose(); <add> } catch (Throwable ex) { <add> if (errors == null) { <add> errors = new ArrayList<Throwable>(); <add> } <add> errors.add(ex); <add> } <add> } <add> if (errors != null) { <add> if (errors.size() == 1) { <add> throw Exceptions.propagate(errors.get(0)); <add> } <add> throw new CompositeException(errors); <add> } <add> } <add>} <ide><path>src/main/java/io/reactivex/internal/disposables/ListCompositeResource.java <del>/** <del> * Copyright 2016 Netflix, Inc. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <del> * compliance with the License. You may obtain a copy of the License at <del> * <del> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software distributed under the License is <del> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <del> * the License for the specific language governing permissions and limitations under the License. <del> */ <del> <del>package io.reactivex.internal.disposables; <del> <del>import java.util.LinkedList; <del> <del>import io.reactivex.disposables.Disposable; <del>import io.reactivex.functions.Consumer; <del> <del>/** <del> * A linked-list-based composite resource with custom disposer callback. <del> * <del> * @param <T> the resource type <del> * @deprecated Use more type-specific and inlined resource management <del> */ <del>@Deprecated <del>public final class ListCompositeResource<T> implements CompositeResource<T>, Disposable { <del> final Consumer<? super T> disposer; <del> <del> /** Indicates this resource has been disposed. */ <del> volatile boolean disposed; <del> <del> /** The set of resources, accessed while holding a lock on this. */ <del> LinkedList<T> list; <del> <del> public ListCompositeResource(Consumer<? super T> disposer) { <del> this.disposer = disposer; <del> } <del> <del> public ListCompositeResource(Consumer<? super T> disposer, T... 
initialResources) { <del> this(disposer); <del> int n = initialResources.length; <del> if (n != 0) { <del> list = new LinkedList<T>(); <del> for (T r : initialResources) { <del> list.add(r); <del> } <del> } <del> } <del> <del> public ListCompositeResource(Consumer<? super T> disposer, Iterable<? extends T> initialResources) { <del> this(disposer); <del> list = new LinkedList<T>(); <del> for (T r : initialResources) { <del> list.add(r); <del> } <del> } <del> <del> /** <del> * Adds a new resource to this composite or disposes it if the composite has been disposed. <del> * @param newResource the new resource to add, not-null (not checked) <del> * @return false if the container is disposed <del> */ <del> @Override <del> public boolean add(T newResource) { <del> if (!disposed) { <del> synchronized (this) { <del> if (!disposed) { <del> LinkedList<T> a = list; <del> if (a == null) { <del> a = new LinkedList<T>(); <del> list = a; <del> } <del> a.add(newResource); <del> return true; <del> } <del> } <del> } <del> disposer.accept(newResource); <del> return false; <del> } <del> <del> /** <del> * Removes the given resource from this composite and calls the disposer if the resource <del> * was indeed in the composite. <del> * @param resource the resource to remove, not-null (not verified) <del> * @return false if the resource was not in this container <del> */ <del> @Override <del> public boolean remove(T resource) { <del> if (delete(resource)) { <del> disposer.accept(resource); <del> return true; <del> } <del> return false; <del> } <del> <del> /** <del> * Removes the given resource if contained within this composite but doesn't call the disposer for it. <del> * @param resource the resource to delete, not-null (not verified) <del> * @return false if the resource was not in this container <del> */ <del> @Override <del> public boolean delete(T resource) { <del> if (disposed) { <del> return false; <del> } <del> synchronized (this) { <del> if (disposed) { <del> return false; <del> } <del> LinkedList<T> a = list; <del> if (a == null || a.isEmpty()) { <del> return false; <del> } <del> <del> return a.remove(resource); <del> } <del> } <del> <del> @Override <del> public void dispose() { <del> if (!disposed) { <del> LinkedList<T> s; <del> synchronized (this) { <del> if (disposed) { <del> return; <del> } <del> disposed = true; <del> s = list; <del> list = null; <del> } <del> if (s != null) { <del> for (T t : s) { <del> disposer.accept(t); <del> } <del> } <del> } <del> } <del> <del> @Override <del> public boolean isDisposed() { <del> return disposed; <del> } <del>} <ide><path>src/main/java/io/reactivex/internal/disposables/MultipleAssignmentResource.java <del>/** <del> * Copyright 2016 Netflix, Inc. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <del> * compliance with the License. You may obtain a copy of the License at <del> * <del> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software distributed under the License is <del> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <del> * the License for the specific language governing permissions and limitations under the License. 
<del> */ <del> <del>package io.reactivex.internal.disposables; <del> <del>import java.util.concurrent.atomic.AtomicReference; <del> <del>import io.reactivex.disposables.Disposable; <del>import io.reactivex.functions.Consumer; <del>import io.reactivex.internal.util.TerminalAtomicsHelper; <del> <del>/** <del> * Holds onto resources with a custom disposer callback and replacing a resource doesn't <del> * call the disposer but only when the MultipleAssignmentResource is disposed. <del> * <del> * <p>This resource container disposable helps in avoiding the wrapping of other resources <del> * into Disposables. <del> * <del> * <p>Note that since the implementation leaks the methods of AtomicReference, one must be <del> * careful to only call setResource and dispose on it. All other methods may lead to undefined behavior <del> * and should be used by internal means only. <del> * <del> * @param <T> the resource type <del> * @deprecated Use more type-specific and inlined resource management <del> */ <del>@Deprecated <del>public final class MultipleAssignmentResource<T> extends AtomicReference<Object> implements Disposable { <del> /** */ <del> private static final long serialVersionUID = 5247635821051810205L; <del> /** The callback to dispose the resource. */ <del> final Consumer<? super T> disposer; <del> /** The indicator object that this container has been disposed. */ <del> static final Object DISPOSED = new Object(); <del> <del> /** <del> * Constructor with a custom disposer callback. <del> * @param disposer <del> */ <del> public MultipleAssignmentResource(Consumer<? super T> disposer) { <del> this.disposer = disposer; <del> } <del> <del> /** <del> * Constructor with a custom disposer callback and the initial resource <del> * @param disposer <del> * @param initialResource <del> */ <del> public MultipleAssignmentResource(Consumer<? super T> disposer, T initialResource) { <del> this(disposer); <del> lazySet(initialResource); <del> } <del> <del> /** <del> * Atomically replaces the current resource with the new resource but doesn't call the disposer <del> * for it. <del> * @param newResource the new resource to replace the old one <del> */ <del> @SuppressWarnings("unchecked") <del> public void setResource(T newResource) { <del> TerminalAtomicsHelper.update(this, newResource, DISPOSED, (Consumer<Object>)disposer); <del> } <del> <del> /** <del> * Returns the current held resource or null if no resource <del> * is set or the container has been disposed. <del> * @return the currently held resource <del> */ <del> @SuppressWarnings("unchecked") <del> public T getResource() { <del> Object d = get(); <del> if (d == DISPOSED) { <del> return null; <del> } <del> return (T)d; <del> } <del> <del> @Override <del> @SuppressWarnings("unchecked") <del> public void dispose() { <del> TerminalAtomicsHelper.terminate(this, DISPOSED, (Consumer<Object>)disposer); <del> } <del> <del> /** <del> * Returns true if this resource has been disposed. <del> * @return true if this resource has been disposed <del> */ <del> @Override <del> public boolean isDisposed() { <del> return get() == DISPOSED; <del> } <del>} <ide><path>src/main/java/io/reactivex/internal/disposables/SetCompositeResource.java <del>/** <del> * Copyright 2016 Netflix, Inc. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <del> * compliance with the License. 
You may obtain a copy of the License at <del> * <del> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software distributed under the License is <del> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <del> * the License for the specific language governing permissions and limitations under the License. <del> */ <del> <del>package io.reactivex.internal.disposables; <del> <del>import io.reactivex.disposables.Disposable; <del>import io.reactivex.functions.Consumer; <del>import io.reactivex.internal.util.*; <del> <del>/** <del> * A set-based composite resource with custom disposer callback. <del> * <del> * @param <T> the resource type <del> * @deprecated Use more type-specific and inlined resource management <del> */ <del>@Deprecated <del>public final class SetCompositeResource<T> implements CompositeResource<T>, Disposable { <del> final Consumer<? super T> disposer; <del> <del> /** Indicates this resource has been disposed. */ <del> volatile boolean disposed; <del> <del> /** The set of resources, accessed while holding a lock on this. */ <del> OpenHashSet<T> set; <del> <del> public SetCompositeResource(Consumer<? super T> disposer) { <del> this.disposer = disposer; <del> } <del> <del> public SetCompositeResource(Consumer<? super T> disposer, T... initialResources) { <del> this(disposer); <del> int n = initialResources.length; <del> if (n != 0) { <del> set = new OpenHashSet<T>(n); <del> for (T r : initialResources) { <del> set.add(r); <del> } <del> } <del> } <del> <del> public SetCompositeResource(Consumer<? super T> disposer, Iterable<? extends T> initialResources) { <del> this(disposer); <del> set = new OpenHashSet<T>(); <del> for (T r : initialResources) { <del> set.add(r); <del> } <del> } <del> <del> /** <del> * Adds a new resource to this composite or disposes it if the composite has been disposed. <del> * @param newResource the new resource to add, not-null (not checked) <del> * @return true if the add succeeded, false if this container was disposed <del> */ <del> @Override <del> public boolean add(T newResource) { <del> if (!disposed) { <del> synchronized (this) { <del> if (!disposed) { <del> OpenHashSet<T> a = set; <del> if (a == null) { <del> a = new OpenHashSet<T>(4); <del> set = a; <del> } <del> a.add(newResource); <del> return true; <del> } <del> } <del> } <del> disposer.accept(newResource); <del> return false; <del> } <del> <del> /** <del> * Removes the given resource from this composite and calls the disposer if the resource <del> * was indeed in the composite. <del> * @param resource the resource to remove, not-null (not verified) <del> * @return true if the resource was removed, false otherwise <del> */ <del> @Override <del> public boolean remove(T resource) { <del> if (delete(resource)) { <del> disposer.accept(resource); <del> return true; <del> } <del> return false; <del> } <del> <del> /** <del> * Removes the given resource if contained within this composite but doesn't call the disposer for it. 
<del> * @param resource the resource to delete, not-null (not verified) <del> * @return true if the delete succeeded, false if this container was disposed <del> */ <del> @Override <del> public boolean delete(T resource) { <del> if (disposed) { <del> return false; <del> } <del> synchronized (this) { <del> if (disposed) { <del> return false; <del> } <del> OpenHashSet<T> a = set; <del> if (a == null || a.isEmpty()) { <del> return false; <del> } <del> <del> return a.remove(resource); <del> } <del> } <del> <del> public int size() { <del> synchronized (this) { <del> OpenHashSet<T> a = set; <del> if (a == null) { <del> return 0; <del> } <del> final int[] c = new int[1]; <del> a.forEach(new Consumer<T>() { <del> @Override <del> public void accept(T v) { <del> c[0]++; <del> } <del> }); <del> return c[0]; <del> } <del> } <del> <del> @Override <del> public void dispose() { <del> if (!disposed) { <del> OpenHashSet<T> s; <del> synchronized (this) { <del> if (disposed) { <del> return; <del> } <del> disposed = true; <del> s = set; <del> set = null; <del> } <del> if (s != null) { <del> disposeAll(s); <del> } <del> } <del> } <del> <del> @Override <del> public boolean isDisposed() { <del> return disposed; <del> } <del> <del> public void clear() { <del> if (!disposed) { <del> OpenHashSet<T> s; <del> synchronized (this) { <del> if (disposed) { <del> return; <del> } <del> s = set; <del> set = null; <del> } <del> if (s != null) { <del> disposeAll(s); <del> } <del> } <del> } <del> void disposeAll(OpenHashSet<T> s) { <del> Throwable ex = s.forEachSuppress(disposer); <del> if (ex != null) { <del> Exceptions.propagate(ex); <del> } <del> } <del>} <ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableMerge.java <ide> import io.reactivex.*; <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.exceptions.CompositeException; <del>import io.reactivex.internal.disposables.SetCompositeResource; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <ide> import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public void subscribeActual(CompletableSubscriber s) { <ide> private static final long serialVersionUID = -2108443387387077490L; <ide> <ide> final CompletableSubscriber actual; <del> final SetCompositeResource<Disposable> set; <add> final CompositeDisposable set; <ide> final int maxConcurrency; <ide> final boolean delayErrors; <ide> <ide> public CompletableMergeSubscriber(CompletableSubscriber actual, int maxConcurren <ide> this.actual = actual; <ide> this.maxConcurrency = maxConcurrency; <ide> this.delayErrors = delayErrors; <del> this.set = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.set = new CompositeDisposable(); <ide> lazySet(1); <ide> } <ide> <ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableObserveOn.java <ide> package io.reactivex.internal.operators.completable; <ide> <ide> import io.reactivex.*; <del>import io.reactivex.disposables.*; <del>import io.reactivex.internal.disposables.ArrayCompositeResource; <add>import io.reactivex.disposables.Disposable; <add>import io.reactivex.internal.disposables.ArrayCompositeDisposable; <ide> <ide> public final class CompletableObserveOn extends Completable { <ide> <ide> public CompletableObserveOn(CompletableConsumable source, Scheduler scheduler) { <ide> @Override <ide> protected void subscribeActual(final CompletableSubscriber s) { <ide> <del> final ArrayCompositeResource<Disposable> ad = new ArrayCompositeResource<Disposable>(2, 
Disposables.consumeAndDispose()); <add> final ArrayCompositeDisposable ad = new ArrayCompositeDisposable(2); <ide> final Scheduler.Worker w = scheduler.createWorker(); <ide> ad.set(0, w); <ide> <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableBufferBoundary.java <ide> import io.reactivex.Flowable; <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.functions.*; <del>import io.reactivex.internal.disposables.SetCompositeResource; <ide> import io.reactivex.internal.queue.MpscLinkedQueue; <ide> import io.reactivex.internal.subscribers.flowable.*; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <ide> protected void subscribeActual(Subscriber<? super U> s) { <ide> final Publisher<? extends Open> bufferOpen; <ide> final Function<? super Open, ? extends Publisher<? extends Close>> bufferClose; <ide> final Supplier<U> bufferSupplier; <del> final SetCompositeResource<Disposable> resources; <add> final CompositeDisposable resources; <ide> <ide> Subscription s; <ide> <ide> public BufferBoundarySubscriber(Subscriber<? super U> actual, <ide> this.bufferClose = bufferClose; <ide> this.bufferSupplier = bufferSupplier; <ide> this.buffers = new LinkedList<U>(); <del> this.resources = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.resources = new CompositeDisposable(); <ide> } <ide> @Override <ide> public void onSubscribe(Subscription s) { <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableIntervalRange.java <ide> import io.reactivex.internal.disposables.DisposableHelper; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <ide> import io.reactivex.internal.util.BackpressureHelper; <del>import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public final class FlowableIntervalRange extends Flowable<Long> { <ide> final Scheduler scheduler; <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowablePublish.java <ide> public static <T, R> Flowable<R> create(final Flowable<? extends T> source, <ide> public void subscribe(Subscriber<? super R> sr) { <ide> ConnectableFlowable<T> op = create(source, bufferSize); <ide> <del> final SubscriberResourceWrapper<R, Disposable> srw = new SubscriberResourceWrapper<R, Disposable>(sr, Disposables.consumeAndDispose()); <add> final SubscriberResourceWrapper<R> srw = new SubscriberResourceWrapper<R>(sr); <ide> <ide> selector.apply(op).subscribe(srw); <ide> <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableRefCount.java <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.flowables.ConnectableFlowable; <ide> import io.reactivex.functions.Consumer; <del>import io.reactivex.internal.disposables.SetCompositeResource; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <ide> <ide> /** <ide> <ide> final class ConnectionSubscriber implements Subscriber<T>, Subscription { <ide> final Subscriber<? super T> subscriber; <del> final SetCompositeResource<Disposable> currentBase; <add> final CompositeDisposable currentBase; <ide> final Disposable resource; <ide> <ide> Subscription s; <ide> <ide> private ConnectionSubscriber(Subscriber<? 
super T> subscriber, <del> SetCompositeResource<Disposable> currentBase, Disposable resource) { <add> CompositeDisposable currentBase, Disposable resource) { <ide> this.subscriber = subscriber; <ide> this.currentBase = currentBase; <ide> this.resource = resource; <ide> void cleanup() { <ide> try { <ide> if (baseSubscription == currentBase) { <ide> baseSubscription.dispose(); <del> baseSubscription = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> baseSubscription = new CompositeDisposable(); <ide> subscriptionCount.set(0); <ide> } <ide> } finally { <ide> void cleanup() { <ide> } <ide> <ide> final ConnectableFlowable<? extends T> source; <del> volatile SetCompositeResource<Disposable> baseSubscription = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> volatile CompositeDisposable baseSubscription = new CompositeDisposable(); <ide> final AtomicInteger subscriptionCount = new AtomicInteger(0); <ide> <ide> /** <ide> public void accept(Disposable subscription) { <ide> }; <ide> } <ide> <del> void doSubscribe(final Subscriber<? super T> subscriber, final SetCompositeResource<Disposable> currentBase) { <add> void doSubscribe(final Subscriber<? super T> subscriber, final CompositeDisposable currentBase) { <ide> // handle unsubscribing from the base subscription <ide> Disposable d = disconnect(currentBase); <ide> <ide> void doSubscribe(final Subscriber<? super T> subscriber, final SetCompositeResou <ide> source.unsafeSubscribe(s); <ide> } <ide> <del> private Disposable disconnect(final SetCompositeResource<Disposable> current) { <add> private Disposable disconnect(final CompositeDisposable current) { <ide> return Disposables.from(new Runnable() { <ide> @Override <ide> public void run() { <ide> public void run() { <ide> baseSubscription.dispose(); <ide> // need a new baseSubscription because once <ide> // unsubscribed stays that way <del> baseSubscription = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> baseSubscription = new CompositeDisposable(); <ide> } <ide> } <ide> } finally { <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableReplay.java <ide> public void subscribe(Subscriber<? super R> child) { <ide> return; <ide> } <ide> <del> final SubscriberResourceWrapper<R, Disposable> srw = new SubscriberResourceWrapper<R, Disposable>(child, Disposables.consumeAndDispose()); <add> final SubscriberResourceWrapper<R> srw = new SubscriberResourceWrapper<R>(child); <ide> <ide> observable.subscribe(srw); <ide> <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableSequenceEqual.java <ide> <ide> import io.reactivex.Flowable; <ide> import io.reactivex.functions.BiPredicate; <del>import io.reactivex.internal.disposables.ArrayCompositeResource; <ide> import io.reactivex.internal.queue.*; <del>import io.reactivex.internal.subscriptions.SubscriptionHelper; <add>import io.reactivex.internal.subscriptions.*; <ide> import io.reactivex.internal.util.Pow2; <ide> <ide> public final class FlowableSequenceEqual<T> extends Flowable<Boolean> { <ide> public void subscribeActual(Subscriber<? super Boolean> s) { <ide> private static final long serialVersionUID = -6178010334400373240L; <ide> final Subscriber<? super Boolean> actual; <ide> final BiPredicate<? super T, ? super T> comparer; <del> final ArrayCompositeResource<Subscription> resources; <add> final ArrayCompositeSubscription resources; <ide> final Publisher<? extends T> first; <ide> final Publisher<? 
extends T> second; <ide> final EqualSubscriber<T>[] subscribers; <ide> public EqualCoordinator(Subscriber<? super Boolean> actual, int bufferSize, <ide> this.subscribers = as; <ide> as[0] = new EqualSubscriber<T>(this, 0, bufferSize); <ide> as[1] = new EqualSubscriber<T>(this, 1, bufferSize); <del> this.resources = new ArrayCompositeResource<Subscription>(2, SubscriptionHelper.consumeAndCancel()); <add> this.resources = new ArrayCompositeSubscription(2); <ide> } <ide> <ide> boolean setSubscription(Subscription s, int index) { <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableSkipUntil.java <ide> import org.reactivestreams.*; <ide> <ide> import io.reactivex.Flowable; <del>import io.reactivex.internal.disposables.ArrayCompositeResource; <ide> import io.reactivex.internal.subscriptions.*; <ide> import io.reactivex.subscribers.SerializedSubscriber; <ide> <ide> public FlowableSkipUntil(Publisher<T> source, Publisher<U> other) { <ide> protected void subscribeActual(Subscriber<? super T> child) { <ide> final SerializedSubscriber<T> serial = new SerializedSubscriber<T>(child); <ide> <del> final ArrayCompositeResource<Subscription> frc = new ArrayCompositeResource<Subscription>(2, SubscriptionHelper.consumeAndCancel()); <add> final ArrayCompositeSubscription frc = new ArrayCompositeSubscription(2); <ide> <ide> final SkipUntilSubscriber<T> sus = new SkipUntilSubscriber<T>(serial, frc); <ide> <ide> public void onComplete() { <ide> /** */ <ide> private static final long serialVersionUID = -1113667257122396604L; <ide> final Subscriber<? super T> actual; <del> final ArrayCompositeResource<Subscription> frc; <add> final ArrayCompositeSubscription frc; <ide> <ide> Subscription s; <ide> <ide> volatile boolean notSkipping; <ide> boolean notSkippingLocal; <ide> <del> public SkipUntilSubscriber(Subscriber<? super T> actual, ArrayCompositeResource<Subscription> frc) { <add> public SkipUntilSubscriber(Subscriber<? super T> actual, ArrayCompositeSubscription frc) { <ide> this.actual = actual; <ide> this.frc = frc; <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableTakeUntil.java <ide> import org.reactivestreams.*; <ide> <ide> import io.reactivex.Flowable; <del>import io.reactivex.internal.disposables.ArrayCompositeResource; <ide> import io.reactivex.internal.subscriptions.*; <ide> import io.reactivex.subscribers.SerializedSubscriber; <ide> <ide> public FlowableTakeUntil(Publisher<T> source, Publisher<? extends U> other) { <ide> protected void subscribeActual(Subscriber<? super T> child) { <ide> final SerializedSubscriber<T> serial = new SerializedSubscriber<T>(child); <ide> <del> final ArrayCompositeResource<Subscription> frc = new ArrayCompositeResource<Subscription>(2, SubscriptionHelper.consumeAndCancel()); <add> final ArrayCompositeSubscription frc = new ArrayCompositeSubscription(2); <ide> <ide> final TakeUntilSubscriber<T> tus = new TakeUntilSubscriber<T>(serial, frc); <ide> <ide> public void onComplete() { <ide> /** */ <ide> private static final long serialVersionUID = 3451719290311127173L; <ide> final Subscriber<? super T> actual; <del> final ArrayCompositeResource<Subscription> frc; <add> final ArrayCompositeSubscription frc; <ide> <ide> Subscription s; <ide> <del> public TakeUntilSubscriber(Subscriber<? super T> actual, ArrayCompositeResource<Subscription> frc) { <add> public TakeUntilSubscriber(Subscriber<? 
super T> actual, ArrayCompositeSubscription frc) { <ide> this.actual = actual; <ide> this.frc = frc; <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableTimer.java <ide> <ide> import io.reactivex.*; <ide> import io.reactivex.disposables.Disposable; <del>import io.reactivex.internal.disposables.EmptyDisposable; <del>import io.reactivex.internal.disposables.DisposableHelper; <add>import io.reactivex.internal.disposables.*; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <del>import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public final class FlowableTimer extends Flowable<Long> { <ide> final Scheduler scheduler; <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableWindowBoundarySelector.java <ide> <ide> package io.reactivex.internal.operators.flowable; <ide> <del>import io.reactivex.internal.disposables.DisposableHelper; <ide> import java.util.*; <ide> import java.util.concurrent.atomic.*; <ide> <ide> import io.reactivex.Flowable; <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.functions.Function; <del>import io.reactivex.internal.disposables.SetCompositeResource; <add>import io.reactivex.internal.disposables.DisposableHelper; <ide> import io.reactivex.internal.queue.MpscLinkedQueue; <ide> import io.reactivex.internal.subscribers.flowable.*; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <ide> protected void subscribeActual(Subscriber<? super Flowable<T>> s) { <ide> final Publisher<B> open; <ide> final Function<? super B, ? extends Publisher<V>> close; <ide> final int bufferSize; <del> final SetCompositeResource<Disposable> resources; <add> final CompositeDisposable resources; <ide> <ide> Subscription s; <ide> <ide> public WindowBoundaryMainSubscriber(Subscriber<? super Flowable<T>> actual, <ide> this.open = open; <ide> this.close = close; <ide> this.bufferSize = bufferSize; <del> this.resources = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.resources = new CompositeDisposable(); <ide> this.ws = new ArrayList<UnicastProcessor<T>>(); <ide> windows.lazySet(1); <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/observable/NbpOperatorPublish.java <ide> public static <T, R> Observable<R> create(final ObservableConsumable<? extends T <ide> public void subscribe(Observer<? super R> sr) { <ide> ConnectableObservable<T> op = create(source, bufferSize); <ide> <del> final ObserverResourceWrapper<R, Disposable> srw = new ObserverResourceWrapper<R, Disposable>(sr, Disposables.consumeAndDispose()); <add> final ObserverResourceWrapper<R> srw = new ObserverResourceWrapper<R>(sr); <ide> <ide> selector.apply(op).subscribe(srw); <ide> <ide><path>src/main/java/io/reactivex/internal/operators/observable/NbpOperatorReplay.java <ide> public void subscribe(Observer<? 
super R> child) { <ide> return; <ide> } <ide> <del> final ObserverResourceWrapper<R, Disposable> srw = new ObserverResourceWrapper<R, Disposable>(child, Disposables.consumeAndDispose()); <add> final ObserverResourceWrapper<R> srw = new ObserverResourceWrapper<R>(child); <ide> <ide> observable.subscribe(srw); <ide> <ide><path>src/main/java/io/reactivex/internal/operators/observable/NbpOperatorSkipUntil.java <ide> <ide> import io.reactivex.*; <ide> import io.reactivex.Observable.NbpOperator; <del>import io.reactivex.disposables.*; <add>import io.reactivex.disposables.Disposable; <ide> import io.reactivex.internal.disposables.*; <ide> import io.reactivex.observers.SerializedObserver; <ide> <ide> public Observer<? super T> apply(Observer<? super T> child) { <ide> <ide> final SerializedObserver<T> serial = new SerializedObserver<T>(child); <ide> <del> final ArrayCompositeResource<Disposable> frc = new ArrayCompositeResource<Disposable>(2, Disposables.consumeAndDispose()); <add> final ArrayCompositeDisposable frc = new ArrayCompositeDisposable(2); <ide> <ide> final SkipUntilSubscriber<T> sus = new SkipUntilSubscriber<T>(serial, frc); <ide> <ide> public void onComplete() { <ide> /** */ <ide> private static final long serialVersionUID = -1113667257122396604L; <ide> final Observer<? super T> actual; <del> final ArrayCompositeResource<Disposable> frc; <add> final ArrayCompositeDisposable frc; <ide> <ide> Disposable s; <ide> <ide> volatile boolean notSkipping; <ide> boolean notSkippingLocal; <ide> <del> public SkipUntilSubscriber(Observer<? super T> actual, ArrayCompositeResource<Disposable> frc) { <add> public SkipUntilSubscriber(Observer<? super T> actual, ArrayCompositeDisposable frc) { <ide> this.actual = actual; <ide> this.frc = frc; <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/observable/NbpOperatorTakeUntil.java <ide> <ide> import io.reactivex.*; <ide> import io.reactivex.Observable.NbpOperator; <del>import io.reactivex.disposables.*; <add>import io.reactivex.disposables.Disposable; <ide> import io.reactivex.internal.disposables.*; <del>import io.reactivex.internal.subscriptions.SubscriptionHelper; <ide> import io.reactivex.observers.SerializedObserver; <ide> <ide> public final class NbpOperatorTakeUntil<T, U> implements NbpOperator<T, T> { <ide> public NbpOperatorTakeUntil(ObservableConsumable<? extends U> other) { <ide> public Observer<? super T> apply(Observer<? super T> child) { <ide> final SerializedObserver<T> serial = new SerializedObserver<T>(child); <ide> <del> final ArrayCompositeResource<Disposable> frc = new ArrayCompositeResource<Disposable>(2, Disposables.consumeAndDispose()); <add> final ArrayCompositeDisposable frc = new ArrayCompositeDisposable(2); <ide> <ide> final TakeUntilSubscriber<T> tus = new TakeUntilSubscriber<T>(serial, frc); <ide> <ide> public void onComplete() { <ide> /** */ <ide> private static final long serialVersionUID = 3451719290311127173L; <ide> final Observer<? super T> actual; <del> final ArrayCompositeResource<Disposable> frc; <add> final ArrayCompositeDisposable frc; <ide> <ide> Disposable s; <ide> <del> public TakeUntilSubscriber(Observer<? super T> actual, ArrayCompositeResource<Disposable> frc) { <add> public TakeUntilSubscriber(Observer<? super T> actual, ArrayCompositeDisposable frc) { <ide> this.actual = actual; <ide> this.frc = frc; <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/observable/NbpOperatorTimeout.java <ide> public Observer<? super T> apply(Observer<? 
super T> t) { <ide> static final class TimeoutSubscriber<T, U, V> <ide> extends AtomicReference<Disposable> <ide> implements Observer<T>, Disposable, OnTimeout { <add> /** */ <add> private static final long serialVersionUID = 2672739326310051084L; <ide> final Observer<? super T> actual; <ide> final Supplier<? extends ObservableConsumable<U>> firstTimeoutSelector; <ide> final Function<? super T, ? extends ObservableConsumable<V>> timeoutSelector; <ide> public void onComplete() { <ide> static final class TimeoutOtherSubscriber<T, U, V> <ide> extends AtomicReference<Disposable> <ide> implements Observer<T>, Disposable, OnTimeout { <add> /** */ <add> private static final long serialVersionUID = -1957813281749686898L; <ide> final Observer<? super T> actual; <ide> final Supplier<? extends ObservableConsumable<U>> firstTimeoutSelector; <ide> final Function<? super T, ? extends ObservableConsumable<V>> timeoutSelector; <ide><path>src/main/java/io/reactivex/internal/operators/observable/NbpOperatorWindowBoundarySelector.java <ide> import io.reactivex.Observer; <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.functions.Function; <del>import io.reactivex.internal.disposables.*; <add>import io.reactivex.internal.disposables.DisposableHelper; <ide> import io.reactivex.internal.queue.MpscLinkedQueue; <ide> import io.reactivex.internal.subscribers.observable.*; <ide> import io.reactivex.internal.util.NotificationLite; <ide> public Observer<? super T> apply(Observer<? super Observable<T>> t) { <ide> final ObservableConsumable<B> open; <ide> final Function<? super B, ? extends ObservableConsumable<V>> close; <ide> final int bufferSize; <del> final SetCompositeResource<Disposable> resources; <add> final CompositeDisposable resources; <ide> <ide> Disposable s; <ide> <ide> public WindowBoundaryMainSubscriber(Observer<? super Observable<T>> actual, <ide> this.open = open; <ide> this.close = close; <ide> this.bufferSize = bufferSize; <del> this.resources = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.resources = new CompositeDisposable(); <ide> this.ws = new ArrayList<UnicastSubject<T>>(); <ide> windows.lazySet(1); <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableInterval.java <ide> import io.reactivex.*; <ide> import io.reactivex.disposables.Disposable; <ide> import io.reactivex.internal.disposables.DisposableHelper; <del>import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public final class ObservableInterval extends Observable<Long> { <ide> final Scheduler scheduler; <ide> public void subscribeActual(Observer<? super Long> s) { <ide> extends AtomicReference<Disposable> <ide> implements Disposable, Runnable { <ide> <add> /** */ <add> private static final long serialVersionUID = 346773832286157679L; <add> <ide> final Observer<? super Long> actual; <ide> <ide> long count; <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableIntervalRange.java <ide> import io.reactivex.*; <ide> import io.reactivex.disposables.Disposable; <ide> import io.reactivex.internal.disposables.DisposableHelper; <del>import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public final class ObservableIntervalRange extends Observable<Long> { <ide> final Scheduler scheduler; <ide> public void subscribeActual(Observer<? 
super Long> s) { <ide> extends AtomicReference<Disposable> <ide> implements Disposable, Runnable { <ide> <add> /** */ <add> private static final long serialVersionUID = 1891866368734007884L; <add> <ide> final Observer<? super Long> actual; <ide> final long end; <ide> <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableRefCount.java <ide> import io.reactivex.*; <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.functions.Consumer; <del>import io.reactivex.internal.disposables.*; <add>import io.reactivex.internal.disposables.DisposableHelper; <ide> import io.reactivex.observables.ConnectableObservable; <ide> <ide> /** <ide> <ide> final class ConnectionSubscriber implements Observer<T>, Disposable { <ide> final Observer<? super T> subscriber; <del> final SetCompositeResource<Disposable> currentBase; <add> final CompositeDisposable currentBase; <ide> final Disposable resource; <ide> <ide> Disposable s; <ide> <ide> private ConnectionSubscriber(Observer<? super T> subscriber, <del> SetCompositeResource<Disposable> currentBase, Disposable resource) { <add> CompositeDisposable currentBase, Disposable resource) { <ide> this.subscriber = subscriber; <ide> this.currentBase = currentBase; <ide> this.resource = resource; <ide> void cleanup() { <ide> try { <ide> if (baseSubscription == currentBase) { <ide> baseSubscription.dispose(); <del> baseSubscription = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> baseSubscription = new CompositeDisposable(); <ide> subscriptionCount.set(0); <ide> } <ide> } finally { <ide> void cleanup() { <ide> <ide> final ConnectableObservable<? extends T> source; <ide> <del> volatile SetCompositeResource<Disposable> baseSubscription = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> volatile CompositeDisposable baseSubscription = new CompositeDisposable(); <ide> <ide> final AtomicInteger subscriptionCount = new AtomicInteger(); <ide> <ide> public void accept(Disposable subscription) { <ide> }; <ide> } <ide> <del> void doSubscribe(final Observer<? super T> subscriber, final SetCompositeResource<Disposable> currentBase) { <add> void doSubscribe(final Observer<? super T> subscriber, final CompositeDisposable currentBase) { <ide> // handle unsubscribing from the base subscription <ide> Disposable d = disconnect(currentBase); <ide> <ide> void doSubscribe(final Observer<? 
super T> subscriber, final SetCompositeResourc <ide> source.unsafeSubscribe(s); <ide> } <ide> <del> private Disposable disconnect(final SetCompositeResource<Disposable> current) { <add> private Disposable disconnect(final CompositeDisposable current) { <ide> return Disposables.from(new Runnable() { <ide> @Override <ide> public void run() { <ide> public void run() { <ide> baseSubscription.dispose(); <ide> // need a new baseSubscription because once <ide> // unsubscribed stays that way <del> baseSubscription = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> baseSubscription = new CompositeDisposable(); <ide> } <ide> } <ide> } finally { <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableSequenceEqual.java <ide> import io.reactivex.*; <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.functions.BiPredicate; <del>import io.reactivex.internal.disposables.ArrayCompositeResource; <add>import io.reactivex.internal.disposables.ArrayCompositeDisposable; <ide> import io.reactivex.internal.queue.SpscLinkedArrayQueue; <ide> <ide> public final class ObservableSequenceEqual<T> extends Observable<Boolean> { <ide> public void subscribeActual(Observer<? super Boolean> s) { <ide> private static final long serialVersionUID = -6178010334400373240L; <ide> final Observer<? super Boolean> actual; <ide> final BiPredicate<? super T, ? super T> comparer; <del> final ArrayCompositeResource<Disposable> resources; <add> final ArrayCompositeDisposable resources; <ide> final ObservableConsumable<? extends T> first; <ide> final ObservableConsumable<? extends T> second; <ide> final EqualSubscriber<T>[] subscribers; <ide> public EqualCoordinator(Observer<? super Boolean> actual, int bufferSize, <ide> this.subscribers = as; <ide> as[0] = new EqualSubscriber<T>(this, 0, bufferSize); <ide> as[1] = new EqualSubscriber<T>(this, 1, bufferSize); <del> this.resources = new ArrayCompositeResource<Disposable>(2, Disposables.consumeAndDispose()); <add> this.resources = new ArrayCompositeDisposable(2); <ide> } <ide> <ide> boolean setSubscription(Disposable s, int index) { <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObserverResourceWrapper.java <ide> <ide> import io.reactivex.Observer; <ide> import io.reactivex.disposables.Disposable; <del>import io.reactivex.functions.Consumer; <ide> import io.reactivex.internal.disposables.DisposableHelper; <ide> <del>public final class ObserverResourceWrapper<T, R> extends AtomicReference<Object> implements Observer<T>, Disposable { <add>public final class ObserverResourceWrapper<T> extends AtomicReference<Disposable> implements Observer<T>, Disposable { <ide> /** */ <ide> private static final long serialVersionUID = -8612022020200669122L; <ide> <ide> final Observer<? super T> actual; <del> final Consumer<? super R> disposer; <ide> <ide> final AtomicReference<Disposable> subscription = new AtomicReference<Disposable>(); <ide> <del> private static final Object TERMINATED = new Object(); <del> <del> public ObserverResourceWrapper(Observer<? super T> actual, Consumer<? super R> disposer) { <add> public ObserverResourceWrapper(Observer<? 
super T> actual) { <ide> this.actual = actual; <del> this.disposer = disposer; <ide> } <ide> <ide> @Override <ide> public void onComplete() { <ide> } <ide> <ide> @Override <del> @SuppressWarnings("unchecked") <ide> public void dispose() { <ide> DisposableHelper.dispose(subscription); <ide> <del> Object o = get(); <del> if (o != TERMINATED) { <del> o = getAndSet(TERMINATED); <del> if (o != TERMINATED && o != null) { <del> disposer.accept((R)o); <del> } <del> } <add> DisposableHelper.dispose(this); <ide> } <ide> <ide> @Override <ide> public boolean isDisposed() { <ide> return subscription.get() == DisposableHelper.DISPOSED; <ide> } <ide> <del> @SuppressWarnings("unchecked") <del> public void setResource(R resource) { <del> for (;;) { <del> Object r = get(); <del> if (r == TERMINATED) { <del> disposer.accept(resource); <del> return; <del> } <del> if (compareAndSet(r, resource)) { <del> if (r != null) { <del> disposer.accept((R)r); <del> } <del> return; <del> } <del> } <add> public void setResource(Disposable resource) { <add> DisposableHelper.set(this, resource); <ide> } <ide> } <ide><path>src/main/java/io/reactivex/internal/schedulers/ComputationScheduler.java <ide> public void shutdown() { <ide> <ide> <ide> private static class EventLoopWorker extends Scheduler.Worker { <del> private final ListCompositeResource<Disposable> serial; <del> private final SetCompositeResource<Disposable> timed; <del> private final ArrayCompositeResource<Disposable> both; <add> private final ListCompositeDisposable serial; <add> private final CompositeDisposable timed; <add> private final ListCompositeDisposable both; <ide> private final PoolWorker poolWorker; <ide> <ide> volatile boolean disposed; <ide> <ide> EventLoopWorker(PoolWorker poolWorker) { <ide> this.poolWorker = poolWorker; <del> this.serial = new ListCompositeResource<Disposable>(Disposables.consumeAndDispose()); <del> this.timed = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <del> this.both = new ArrayCompositeResource<Disposable>(2, Disposables.consumeAndDispose()); <del> this.both.lazySet(0, serial); <del> this.both.lazySet(1, timed); <add> this.serial = new ListCompositeDisposable(); <add> this.timed = new CompositeDisposable(); <add> this.both = new ListCompositeDisposable(); <add> this.both.add(serial); <add> this.both.add(timed); <ide> } <ide> <ide> @Override <ide><path>src/main/java/io/reactivex/internal/schedulers/ExecutorScheduler.java <ide> public Disposable scheduleDirect(Runnable run, long delay, TimeUnit unit) { <ide> return EmptyDisposable.INSTANCE; <ide> } <ide> } <del> MultipleAssignmentResource<Disposable> first = new MultipleAssignmentResource<Disposable>(Disposables.consumeAndDispose()); <add> SerialDisposable first = new SerialDisposable(); <ide> <del> final MultipleAssignmentResource<Disposable> mar = new MultipleAssignmentResource<Disposable>(Disposables.consumeAndDispose(), first); <add> final SerialDisposable mar = new SerialDisposable(first); <ide> <ide> Disposable delayed = HELPER.scheduleDirect(new Runnable() { <ide> @Override <ide> public void run() { <del> mar.setResource(scheduleDirect(decoratedRun)); <add> mar.replace(scheduleDirect(decoratedRun)); <ide> } <ide> }, delay, unit); <ide> <del> first.setResource(delayed); <add> first.replace(delayed); <ide> <ide> return mar; <ide> } <ide> public static final class ExecutorWorker extends Scheduler.Worker implements Run <ide> <ide> final AtomicInteger wip = new AtomicInteger(); <ide> <del> final SetCompositeResource<Disposable> tasks = new 
SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> final CompositeDisposable tasks = new CompositeDisposable(); <ide> <ide> public ExecutorWorker(Executor executor) { <ide> this.executor = executor; <ide> public Disposable schedule(Runnable run, long delay, TimeUnit unit) { <ide> } <ide> <ide> <del> MultipleAssignmentResource<Disposable> first = new MultipleAssignmentResource<Disposable>(Disposables.consumeAndDispose()); <add> SerialDisposable first = new SerialDisposable(); <ide> <del> final MultipleAssignmentResource<Disposable> mar = new MultipleAssignmentResource<Disposable>(Disposables.consumeAndDispose(), first); <add> final SerialDisposable mar = new SerialDisposable(first); <ide> <ide> final Runnable decoratedRun = RxJavaPlugins.onSchedule(run); <ide> <ide> ScheduledRunnable sr = new ScheduledRunnable(new Runnable() { <ide> @Override <ide> public void run() { <del> mar.setResource(schedule(decoratedRun)); <add> mar.replace(schedule(decoratedRun)); <ide> } <ide> }, tasks); <ide> tasks.add(sr); <ide> public Object get(long timeout, TimeUnit unit) <ide> }); <ide> } <ide> <del> first.setResource(sr); <add> first.replace(sr); <ide> <ide> return mar; <ide> } <ide><path>src/main/java/io/reactivex/internal/schedulers/IoScheduler.java <ide> public final class IoScheduler extends Scheduler implements SchedulerLifecycle { <ide> private static final class CachedWorkerPool { <ide> private final long keepAliveTime; <ide> private final ConcurrentLinkedQueue<ThreadWorker> expiringWorkerQueue; <del> private final SetCompositeResource<Disposable> allWorkers; <add> private final CompositeDisposable allWorkers; <ide> private final ScheduledExecutorService evictorService; <ide> private final Future<?> evictorTask; <ide> <ide> CachedWorkerPool(long keepAliveTime, TimeUnit unit) { <ide> this.keepAliveTime = unit != null ? 
unit.toNanos(keepAliveTime) : 0L; <ide> this.expiringWorkerQueue = new ConcurrentLinkedQueue<ThreadWorker>(); <del> this.allWorkers = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.allWorkers = new CompositeDisposable(); <ide> <ide> ScheduledExecutorService evictor = null; <ide> Future<?> task = null; <ide> public int size() { <ide> } <ide> <ide> private static final class EventLoopWorker extends Scheduler.Worker { <del> private final SetCompositeResource<Disposable> tasks; <add> private final CompositeDisposable tasks; <ide> private final CachedWorkerPool pool; <ide> private final ThreadWorker threadWorker; <ide> <ide> final AtomicBoolean once = new AtomicBoolean(); <ide> <ide> EventLoopWorker(CachedWorkerPool pool) { <ide> this.pool = pool; <del> this.tasks = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.tasks = new CompositeDisposable(); <ide> this.threadWorker = pool.get(); <ide> } <ide> <ide><path>src/main/java/io/reactivex/internal/schedulers/NewThreadWorker.java <ide> public Disposable schedulePeriodicallyDirect(final Runnable run, long initialDel <ide> * @param parent the optional tracker parent to add the created ScheduledRunnable instance to before it gets scheduled <ide> * @return the ScheduledRunnable instance <ide> */ <del> public ScheduledRunnable scheduleActual(final Runnable run, long delayTime, TimeUnit unit, CompositeResource<Disposable> parent) { <add> public ScheduledRunnable scheduleActual(final Runnable run, long delayTime, TimeUnit unit, DisposableContainer parent) { <ide> Runnable decoratedRun = RxJavaPlugins.onSchedule(run); <ide> <ide> ScheduledRunnable sr = new ScheduledRunnable(decoratedRun, parent); <ide><path>src/main/java/io/reactivex/internal/schedulers/ScheduledRunnable.java <ide> import java.util.concurrent.Future; <ide> import java.util.concurrent.atomic.AtomicReferenceArray; <ide> <del>import io.reactivex.disposables.Disposable; <del>import io.reactivex.internal.disposables.CompositeResource; <add>import io.reactivex.disposables.*; <add>import io.reactivex.internal.disposables.DisposableContainer; <ide> import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public final class ScheduledRunnable extends AtomicReferenceArray<Object> implements Runnable, Disposable { <ide> public final class ScheduledRunnable extends AtomicReferenceArray<Object> implem <ide> * @param actual the runnable to wrap, not-null (not verified) <ide> * @param parent the parent tracking container or null if none <ide> */ <del> public ScheduledRunnable(Runnable actual, CompositeResource<Disposable> parent) { <add> public ScheduledRunnable(Runnable actual, DisposableContainer parent) { <ide> super(2); <ide> this.actual = actual; <ide> this.lazySet(0, parent); <ide> } <ide> <ide> @Override <del> @SuppressWarnings("unchecked") <ide> public void run() { <ide> try { <ide> actual.run(); <ide> public void run() { <ide> if (o != DISPOSED && o != null) { <ide> // done races with dispose here <ide> if (compareAndSet(PARENT_INDEX, o, DONE)) { <del> ((CompositeResource<Disposable>)o).delete(this); <add> ((DisposableContainer)o).delete(this); <ide> } <ide> } <ide> <ide> public boolean wasScheduled() { <ide> } <ide> <ide> @Override <del> @SuppressWarnings("unchecked") <ide> public void dispose() { <ide> for (;;) { <ide> Object o = get(FUTURE_INDEX); <ide> public void dispose() { <ide> break; <ide> } <ide> if (compareAndSet(PARENT_INDEX, o, DISPOSED)) { <del> ((CompositeResource<Disposable>)o).delete(this); <add> 
((DisposableContainer)o).delete(this); <ide> return; <ide> } <ide> } <ide><path>src/main/java/io/reactivex/internal/schedulers/SingleScheduler.java <ide> <ide> import io.reactivex.Scheduler; <ide> import io.reactivex.disposables.*; <del>import io.reactivex.internal.disposables.*; <add>import io.reactivex.internal.disposables.EmptyDisposable; <ide> import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public final class SingleScheduler extends Scheduler { <ide> static final class ScheduledWorker extends Scheduler.Worker { <ide> <ide> final ScheduledExecutorService executor; <ide> <del> final SetCompositeResource<Disposable> tasks; <add> final CompositeDisposable tasks; <ide> <ide> volatile boolean disposed; <ide> <ide> public ScheduledWorker(ScheduledExecutorService executor) { <ide> this.executor = executor; <del> this.tasks = new SetCompositeResource<Disposable>(Disposables.consumeAndDispose()); <add> this.tasks = new CompositeDisposable(); <ide> } <ide> <ide> @Override <ide><path>src/main/java/io/reactivex/internal/subscribers/flowable/SubscriberResourceWrapper.java <ide> import org.reactivestreams.*; <ide> <ide> import io.reactivex.disposables.Disposable; <del>import io.reactivex.functions.Consumer; <add>import io.reactivex.internal.disposables.DisposableHelper; <ide> import io.reactivex.internal.subscriptions.SubscriptionHelper; <del>import io.reactivex.plugins.RxJavaPlugins; <ide> <del>public final class SubscriberResourceWrapper<T, R> extends AtomicReference<Object> implements Subscriber<T>, Disposable, Subscription { <add>public final class SubscriberResourceWrapper<T> extends AtomicReference<Disposable> implements Subscriber<T>, Disposable, Subscription { <ide> /** */ <ide> private static final long serialVersionUID = -8612022020200669122L; <ide> <ide> final Subscriber<? super T> actual; <del> final Consumer<? super R> disposer; <ide> <ide> final AtomicReference<Subscription> subscription = new AtomicReference<Subscription>(); <ide> <ide> static final Object TERMINATED = new Object(); <ide> <del> public SubscriberResourceWrapper(Subscriber<? super T> actual, Consumer<? super R> disposer) { <add> public SubscriberResourceWrapper(Subscriber<? super T> actual) { <ide> this.actual = actual; <del> this.disposer = disposer; <ide> } <ide> <ide> @Override <ide> public void request(long n) { <ide> } <ide> <ide> @Override <del> @SuppressWarnings("unchecked") <ide> public void dispose() { <ide> SubscriptionHelper.dispose(subscription); <ide> <del> Object o = get(); <del> if (o != TERMINATED) { <del> o = getAndSet(TERMINATED); <del> if (o != TERMINATED && o != null) { <del> disposer.accept((R)o); <del> } <del> } <add> DisposableHelper.dispose(this); <ide> } <ide> <ide> @Override <ide> public void cancel() { <ide> dispose(); <ide> } <ide> <del> @SuppressWarnings("unchecked") <del> public void setResource(R resource) { <del> for (;;) { <del> Object r = get(); <del> if (r == TERMINATED) { <del> disposer.accept(resource); <del> return; <del> } <del> if (compareAndSet(r, resource)) { <del> if (r != null) { <del> disposer.accept((R)r); <del> } <del> return; <del> } <del> } <add> public void setResource(Disposable resource) { <add> DisposableHelper.set(this, resource); <ide> } <ide> } <ide><path>src/main/java/io/reactivex/internal/subscriptions/ArrayCompositeSubscription.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. 
You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.internal.subscriptions; <add> <add>import java.util.concurrent.atomic.AtomicReferenceArray; <add> <add>import org.reactivestreams.Subscription; <add> <add>import io.reactivex.disposables.Disposable; <add> <add>/** <add> * A composite disposable with a fixed number of slots. <add> * <add> * <p>Note that since the implementation leaks the methods of AtomicReferenceArray, one must be <add> * careful to only call setResource, replaceResource and dispose on it. All other methods may lead to undefined behavior <add> * and should be used by internal means only. <add> */ <add>public final class ArrayCompositeSubscription extends AtomicReferenceArray<Subscription> implements Disposable { <add> /** */ <add> private static final long serialVersionUID = 2746389416410565408L; <add> <add> public ArrayCompositeSubscription(int capacity) { <add> super(capacity); <add> } <add> <add> /** <add> * Sets the resource at the specified index and disposes the old resource. <add> * @param index <add> * @param resource <add> * @return true if the resource has ben set, false if the composite has been disposed <add> */ <add> public boolean setResource(int index, Subscription resource) { <add> for (;;) { <add> Subscription o = get(index); <add> if (o == SubscriptionHelper.CANCELLED) { <add> resource.cancel(); <add> return false; <add> } <add> if (compareAndSet(index, o, resource)) { <add> if (o != null) { <add> o.cancel(); <add> } <add> return true; <add> } <add> } <add> } <add> <add> /** <add> * Replaces the resource at the specified index and returns the old resource. <add> * @param index <add> * @param resource <add> * @return the old resource, can be null <add> */ <add> public Subscription replaceResource(int index, Subscription resource) { <add> for (;;) { <add> Subscription o = get(index); <add> if (o == SubscriptionHelper.CANCELLED) { <add> resource.cancel(); <add> return null; <add> } <add> if (compareAndSet(index, o, resource)) { <add> return o; <add> } <add> } <add> } <add> <add> @Override <add> public void dispose() { <add> if (get(0) != SubscriptionHelper.CANCELLED) { <add> int s = length(); <add> for (int i = 0; i < s; i++) { <add> Subscription o = get(i); <add> if (o != SubscriptionHelper.CANCELLED) { <add> o = getAndSet(i, SubscriptionHelper.CANCELLED); <add> if (o != SubscriptionHelper.CANCELLED && o != null) { <add> o.cancel(); <add> } <add> } <add> } <add> } <add> } <add> <add> @Override <add> public boolean isDisposed() { <add> return get(0) == SubscriptionHelper.CANCELLED; <add> } <add>} <ide><path>src/main/java/io/reactivex/internal/subscriptions/ScalarAsyncSubscription.java <ide> import org.reactivestreams.*; <ide> <ide> import io.reactivex.internal.functions.Objects; <del>import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> /** <ide> * A Subscription that coordinates the emission of a single value set asynchronously. 
<ide><path>src/main/java/io/reactivex/internal/subscriptions/ScalarSubscription.java <ide> <ide> import org.reactivestreams.*; <ide> <del>import io.reactivex.plugins.RxJavaPlugins; <del> <ide> /** <ide> * A Subscription that holds a constant value and emits it only when requested. <ide> * @param <T> the value type <ide><path>src/main/java/io/reactivex/internal/subscriptions/SubscriptionHelper.java <ide> <ide> import org.reactivestreams.Subscription; <ide> <del>import io.reactivex.functions.Consumer; <ide> import io.reactivex.internal.functions.Objects; <ide> import io.reactivex.plugins.RxJavaPlugins; <ide> <ide> public static boolean validateRequest(long n) { <ide> return true; <ide> } <ide> <del> /** Singleton instance of a function which calls cancel on the supplied Subscription. */ <del> static final Consumer<Subscription> CONSUME_AND_CANCEL = new Consumer<Subscription>() { <del> @Override <del> public void accept(Subscription s) { <del> s.cancel(); <del> } <del> }; <del> <del> /** <del> * Returns a consumer which calls cancel on the supplied Subscription. <del> * @return a consumer which calls cancel on the supplied Subscription <del> * @deprecated use different resource management <del> */ <del> @Deprecated <del> public static Consumer<Subscription> consumeAndCancel() { <del> return CONSUME_AND_CANCEL; <del> } <del> <ide> /** <ide> * Represents a cancelled Subscription. <ide> */ <ide><path>src/main/java/io/reactivex/internal/util/OpenHashSet.java <ide> public boolean isEmpty() { <ide> public Object[] keys() { <ide> return keys; <ide> } <add> <add> public int size() { <add> return size; <add> } <ide> } <ide><path>src/main/java/io/reactivex/observers/AsyncObserver.java <ide> import java.util.concurrent.atomic.AtomicReference; <ide> <ide> import io.reactivex.Observer; <del>import io.reactivex.disposables.*; <add>import io.reactivex.disposables.Disposable; <ide> import io.reactivex.internal.disposables.*; <ide> import io.reactivex.internal.functions.Objects; <ide> <ide> private final AtomicReference<Disposable> s = new AtomicReference<Disposable>(); <ide> <ide> /** The resource composite, can be null. */ <del> private final ListCompositeResource<Disposable> resources; <add> private final ListCompositeDisposable resources; <ide> <ide> /** <ide> * Constructs an AsyncObserver with resource support. <ide> public AsyncObserver() { <ide> * @param withResources true if resource support should be on. <ide> */ <ide> public AsyncObserver(boolean withResources) { <del> this.resources = withResources ? new ListCompositeResource<Disposable>(Disposables.consumeAndDispose()) : null; <add> this.resources = withResources ? new ListCompositeDisposable() : null; <ide> } <ide> <ide> /** <ide><path>src/test/java/io/reactivex/internal/operators/observable/NbpOnSubscribeRefCountTest.java <ide> import io.reactivex.disposables.*; <ide> import io.reactivex.flowable.TestHelper; <ide> import io.reactivex.functions.*; <del>import io.reactivex.observers.Observers; <ide> import io.reactivex.observers.TestObserver; <ide> import io.reactivex.schedulers.*; <ide> import io.reactivex.subjects.ReplaySubject; <ide><path>src/test/java/io/reactivex/internal/operators/observable/NbpOperatorUnsubscribeOnTest.java <ide> public void subscribe(Observer<? 
super Integer> t1) { <ide> } <ide> <ide> private static class ThreadSubscription extends AtomicBoolean implements Disposable { <add> /** */ <add> private static final long serialVersionUID = -5011338112974328771L; <add> <ide> private volatile Thread thread; <ide> <ide> private final CountDownLatch latch = new CountDownLatch(1);
52
PHP
PHP
add missing import
14beafaebbd6fe0f1a48942389c57a67b69f23a2
<ide><path>src/Illuminate/Foundation/Testing/Concerns/InteractsWithRedis.php <ide> <ide> namespace Illuminate\Foundation\Testing\Concerns; <ide> <add>use Exception; <ide> use Illuminate\Redis\RedisManager; <ide> <ide> trait InteractsWithRedis <ide> public function setUpRedis() <ide> <ide> try { <ide> $this->redis['predis']->connection()->flushdb(); <del> } catch (\Exception $e) { <add> } catch (Exception $e) { <ide> if ($host === '127.0.0.1' && $port === 6379 && getenv('REDIS_HOST') === false) { <ide> $this->markTestSkipped('Trying default host/port failed, please set environment variable REDIS_HOST & REDIS_PORT to enable '.__CLASS__); <ide> static::$connectionFailedOnceWithDefaultsSkip = true;
1
Javascript
Javascript
simplify bundle building for non-production assets
31477ddc14b7e8fcc3ebeb7c69788aacdd0408d3
<ide><path>broccoli/packages.js <ide> module.exports.qunit = function _qunit() { <ide> <ide> module.exports.getPackagesES = function getPackagesES() { <ide> let input = new Funnel(`packages`, { <del> exclude: ['packages/node-module/**', 'packages/loader/**', 'packages/external-helpers/**'], <add> exclude: ['node-module/**', 'loader/**', 'external-helpers/**'], <ide> destDir: `packages`, <ide> }); <ide> <ide><path>ember-cli-build.js <ide> const { <ide> backburnerES, <ide> dagES, <ide> routeRecognizerES, <del> emberPkgES, <ide> glimmerTrees, <ide> nodeModuleUtils, <ide> emberVersionES, <ide> emberLicense, <ide> emberFeaturesES, <ide> nodeTests, <del> rollupEmberMetal, <ide> buildEmberEnvFlagsES, <ide> getPackagesES, <ide> } = require('./broccoli/packages'); <del>const SHOULD_ROLLUP = true; <ide> const ENV = process.env.EMBER_ENV || 'development'; <ide> <ide> module.exports = function() { <ide> let loader = internalLoader(); <del> let license = emberLicense(); <ide> let nodeModule = nodeModuleUtils(); <ide> <ide> // generate "loose" ES<latest> modules... <del> let combinedES = new MergeTrees([ <del> // dependencies <add> let dependenciesES = new MergeTrees([ <ide> backburnerES(), <ide> handlebarsES(), <del> simpleHTMLTokenizerES(), <ide> rsvpES(), <ide> dagES(), <ide> routerES(), <ide> routeRecognizerES(), <del> ...glimmerDependenciesES({ includeGlimmerCompiler: true }), <ide> <add> ...glimmerDependenciesES(), <add> ]); <add> <add> let templateCompilerDependenciesES = new MergeTrees([ <add> simpleHTMLTokenizerES(), <add> handlebarsES(), <add> ...glimmerTrees(['@glimmer/compiler']), <add> ]); <add> <add> let packagesES = new MergeTrees([ <ide> // dynamically generated packages <ide> emberVersionES(), <ide> emberFeaturesES(), <ide> emberLicense(), <ide> <add> // packages/** (after typescript compilation) <ide> getPackagesES(), <ide> ]); <ide> <del> let es = new Funnel(combinedES, { <del> destDir: 'es', <del> }); <add> let es = new Funnel( <add> new MergeTrees([packagesES, dependenciesES, templateCompilerDependenciesES], { <add> overwrite: true, <add> }), <add> { destDir: 'es' } <add> ); <ide> <ide> let esMin = minify( <del> new Funnel(combinedES, { <add> new Funnel(packagesES, { <ide> destDir: 'es-min', <ide> }) <ide> ); <ide> <del> let pkgAndTestESInAMD = toNamedAMD(combinedES); <add> let pkgAndTestESInAMD = toNamedAMD(packagesES); <ide> let emberEnvFlagsDebug = toNamedAMD(buildEmberEnvFlagsES({ DEBUG: true })); <ide> <ide> let pkgAndTestESBundleDebug = concat( <ide> module.exports = function() { <ide> } <ide> ); <ide> <del> let version = toES5(emberVersionES()); <del> let emberDebug = emberPkgES('ember-debug'); <ide> let babelDebugHelpersES5 = toES5(babelHelpers('debug'), { <ide> annotation: 'babel helpers debug', <ide> }); <del> let inlineParser = toES5(handlebarsES(), { annotation: 'handlebars' }); <del> let tokenizer = toES5(simpleHTMLTokenizerES(), { annotation: 'tokenizer' }); <del> let rsvp = toES5(rsvpES(), { annotation: 'rsvp' }); <del> let emberMetal = new Funnel('packages/ember-metal', { <del> destDir: '/', <del> include: ['**/*.js'], <del> exclude: ['tests'], <del> }); <del> let emberMetalES5 = rollupEmberMetal(emberMetal); <del> let emberConsole = emberPkgES('ember-console', SHOULD_ROLLUP, ['ember-environment']); <del> let emberConsoleES5 = toES5(emberConsole, { annotation: 'ember-console' }); <del> let emberEnvironment = emberPkgES('ember-environment'); <del> let emberEnvironmentES5 = toES5(emberEnvironment, { <del> annotation: 'ember-environment', <del> }); <ide> <ide> // ES5 
<del> let combinedES5 = toES5(combinedES); <add> let packagesES5 = toES5(packagesES); <add> let dependenciesES5 = toES5(dependenciesES); <ide> <ide> // Bundling <ide> let emberTestsBundle = new MergeTrees([ <del> new Funnel(combinedES5, { <add> new Funnel(packagesES5, { <ide> include: ['internal-test-helpers/**', '*/tests/**', 'license.js'], <ide> }), <ide> loader, <ide> module.exports = function() { <ide> }); <ide> <ide> let emberDebugBundle = new MergeTrees([ <del> new Funnel(combinedES5, { <add> new Funnel(packagesES5, { <ide> exclude: ['*/tests/**'], <ide> }), <add> dependenciesES5, <ide> loader, <ide> nodeModule, <ide> bootstrapModule('ember'), <ide> module.exports = function() { <ide> }); <ide> <ide> let emberTestingBundle = new MergeTrees([ <del> new Funnel(combinedES5, { <add> new Funnel(packagesES5, { <ide> include: ['ember-debug/**', 'ember-testing/**', 'license.js'], <ide> }), <ide> loader, <ide> module.exports = function() { <ide> `, <ide> }); <ide> <del> function templateCompiler(babelHelpers) { <add> function templateCompiler() { <ide> return new MergeTrees([ <del> new Funnel(combinedES5, { <add> new Funnel(packagesES5, { <ide> include: [ <ide> 'license.js', <ide> 'ember/features.js', <add> 'ember/version.js', <ide> 'ember-debug/**', <ide> 'ember-environment/**', <ide> 'ember-template-compiler/**', <ide> 'ember-utils/**', <ide> ], <ide> }), <del> ...glimmerTrees(['@glimmer/compiler']).map(toES5), <del> tokenizer, <del> inlineParser, <del> babelHelpers, <add> toES5(templateCompilerDependenciesES), <ide> bootstrapModule('ember-template-compiler', 'umd'), <ide> ]); <ide> } <ide> module.exports = function() { <ide> } <ide> } else { <ide> let emberTemplateCompilerBundle = new MergeTrees([ <del> templateCompiler(babelDebugHelpersES5), <del> version, <add> templateCompiler(), <ide> loader, <add> babelDebugHelpersES5, <ide> nodeModule, <ide> ]); <ide> <ide> module.exports = function() { <ide> ]); <ide> }; <ide> <del>function glimmerDependenciesES(options = {}) { <add>function glimmerDependenciesES() { <ide> let glimmerEntries = ['@glimmer/node', '@glimmer/opcode-compiler', '@glimmer/runtime']; <ide> <del> if (options.includeGlimmerCompiler) { <del> glimmerEntries.push('@glimmer/compiler'); <del> } <del> <ide> if (ENV === 'development') { <ide> let hasGlimmerDebug = true; <ide> try {
2
Mixed
Javascript
capitalize more comments
b08a867d6016ccf04783a0f91fdbcc3460daf234
<ide><path>benchmark/_cli.js <ide> function CLI(usage, settings) { <ide> if (!(this instanceof CLI)) return new CLI(usage, settings); <ide> <ide> if (process.argv.length < 3) { <del> this.abort(usage); // abort will exit the process <add> this.abort(usage); // Abort will exit the process <ide> } <ide> <ide> this.usage = usage; <ide><path>benchmark/common.js <ide> Benchmark.prototype.end = function(operations) { <ide> if (elapsed[0] === 0 && elapsed[1] === 0) { <ide> if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED) <ide> throw new Error('insufficient clock precision for short benchmark'); <del> // avoid dividing by zero <add> // Avoid dividing by zero <ide> elapsed[1] = 1; <ide> } <ide> <ide><path>benchmark/crypto/hash-stream-creation.js <del>// throughput benchmark <add>// Throughput benchmark <ide> // creates a single hasher, then pushes a bunch of data through it <ide> 'use strict'; <ide> const common = require('../common.js'); <ide><path>benchmark/crypto/hash-stream-throughput.js <del>// throughput benchmark <add>// Throughput benchmark <ide> // creates a single hasher, then pushes a bunch of data through it <ide> 'use strict'; <ide> const common = require('../common.js'); <ide><path>benchmark/fixtures/simple-http-server.js <ide> const storedUnicode = Object.create(null); <ide> <ide> const useDomains = process.env.NODE_USE_DOMAINS; <ide> <del>// set up one global domain. <add>// Set up one global domain. <ide> if (useDomains) { <ide> var domain = require('domain'); <ide> const gdom = domain.create(); <ide><path>benchmark/http/cluster.js <ide> const PORT = common.PORT; <ide> const cluster = require('cluster'); <ide> if (cluster.isMaster) { <ide> var bench = common.createBenchmark(main, { <del> // unicode confuses ab on os x. <add> // Unicode confuses ab on os x. <ide> type: ['bytes', 'buffer'], <ide> len: [4, 1024, 102400], <ide> c: [50, 500] <ide><path>benchmark/http/incoming_headers.js <ide> const common = require('../common.js'); <ide> const http = require('http'); <ide> <ide> const bench = common.createBenchmark(main, { <del> // unicode confuses ab on os x. <add> // Unicode confuses ab on os x. <ide> c: [50, 500], <ide> n: [0, 5, 20] <ide> }); <ide><path>benchmark/http/simple.js <ide> const common = require('../common.js'); <ide> <ide> const bench = common.createBenchmark(main, { <del> // unicode confuses ab on os x. <add> // Unicode confuses ab on os x. <ide> type: ['bytes', 'buffer'], <ide> len: [4, 1024, 102400], <ide> chunks: [1, 4], <ide><path>benchmark/net/net-c2s-cork.js <ide> function main({ dur, len, type }) { <ide> <ide> const writer = new Writer(); <ide> <del> // the actual benchmark. <add> // The actual benchmark. <ide> const server = net.createServer((socket) => { <ide> socket.pipe(writer); <ide> }); <ide><path>benchmark/net/net-c2s.js <ide> function main({ dur, len, type }) { <ide> const reader = new Reader(); <ide> const writer = new Writer(); <ide> <del> // the actual benchmark. <add> // The actual benchmark. <ide> const server = net.createServer((socket) => { <ide> socket.pipe(writer); <ide> }); <ide><path>benchmark/net/net-pipe.js <ide> function main({ dur, len, type }) { <ide> const reader = new Reader(); <ide> const writer = new Writer(); <ide> <del> // the actual benchmark. <add> // The actual benchmark. 
<ide> const server = net.createServer((socket) => { <ide> socket.pipe(socket); <ide> }); <ide><path>benchmark/net/net-s2c.js <ide> function main({ dur, len, type }) { <ide> const reader = new Reader(); <ide> const writer = new Writer(); <ide> <del> // the actual benchmark. <add> // The actual benchmark. <ide> const server = net.createServer((socket) => { <ide> reader.pipe(socket); <ide> }); <ide><path>benchmark/net/net-wrap-js-stream-passthrough.js <ide> function main({ dur, len, type }) { <ide> const reader = new Reader(); <ide> const writer = new Writer(); <ide> <del> // the actual benchmark. <add> // The actual benchmark. <ide> const fakeSocket = new JSStreamWrap(new PassThrough()); <ide> bench.start(); <ide> reader.pipe(fakeSocket); <ide><path>benchmark/url/legacy-vs-whatwg-url-parse.js <ide> const bench = common.createBenchmark(main, { <ide> <ide> function useLegacy(data) { <ide> const len = data.length; <del> var result = url.parse(data[0]); // avoid dead code elimination <add> var result = url.parse(data[0]); // Avoid dead code elimination <ide> bench.start(); <ide> for (var i = 0; i < len; ++i) { <ide> result = url.parse(data[i]); <ide> function useLegacy(data) { <ide> <ide> function useWHATWGWithBase(data) { <ide> const len = data.length; <del> var result = new URL(data[0][0], data[0][1]); // avoid dead code elimination <add> var result = new URL(data[0][0], data[0][1]); // Avoid dead code elimination <ide> bench.start(); <ide> for (var i = 0; i < len; ++i) { <ide> const item = data[i]; <ide> function useWHATWGWithBase(data) { <ide> <ide> function useWHATWGWithoutBase(data) { <ide> const len = data.length; <del> var result = new URL(data[0]); // avoid dead code elimination <add> var result = new URL(data[0]); // Avoid dead code elimination <ide> bench.start(); <ide> for (var i = 0; i < len; ++i) { <ide> result = new URL(data[i]); <ide><path>benchmark/url/url-searchparams-sort.js <ide> const common = require('../common.js'); <ide> const URLSearchParams = require('url').URLSearchParams; <ide> <ide> const inputs = { <del> wpt: 'wpt', // to work around tests <add> wpt: 'wpt', // To work around tests <ide> empty: '', <ide> sorted: 'a&b&c&d&e&f&g&h&i&j&k&l&m&n&o&p&q&r&s&t&u&v&w&x&y&z', <ide> almostsorted: 'a&b&c&d&e&f&g&i&h&j&k&l&m&n&o&p&q&r&s&t&u&w&v&x&y&z', <ide><path>doc/api/cluster.md <ide> if (cluster.isMaster) { <ide> } else if (cluster.isWorker) { <ide> const net = require('net'); <ide> const server = net.createServer((socket) => { <del> // connections never end <add> // Connections never end <ide> }); <ide> <ide> server.listen(8000); <ide><path>doc/api/console.md <ide> error output. If `stderr` is not provided, `stdout` is used for `stderr`. <ide> ```js <ide> const output = fs.createWriteStream('./stdout.log'); <ide> const errorOutput = fs.createWriteStream('./stderr.log'); <del>// custom simple logger <add>// Custom simple logger <ide> const logger = new Console({ stdout: output, stderr: errorOutput }); <ide> // use it like console <ide> const count = 5; <ide> logger.log('count: %d', count); <del>// in stdout.log: count 5 <add>// In stdout.log: count 5 <ide> ``` <ide> <ide> The global `console` is a special `Console` whose output is sent to <ide><path>doc/api/domain.md <ide> if (cluster.isMaster) { <ide> // But don't keep the process open just for that! <ide> killtimer.unref(); <ide> <del> // stop taking new requests. <add> // Stop taking new requests. <ide> server.close(); <ide> <ide> // Let the master know we're dead. 
This will trigger a <ide> const d = domain.create(); <ide> <ide> function readSomeFile(filename, cb) { <ide> fs.readFile(filename, 'utf8', d.bind((er, data) => { <del> // If this throws, it will also be passed to the domain <add> // If this throws, it will also be passed to the domain. <ide> return cb(er, data ? JSON.parse(data) : null); <ide> })); <ide> } <ide> <ide> d.on('error', (er) => { <del> // an error occurred somewhere. <del> // if we throw it now, it will crash the program <add> // An error occurred somewhere. If we throw it now, it will crash the program <ide> // with the normal line number and stack message. <ide> }); <ide> ``` <ide> function readSomeFile(filename, cb) { <ide> // callback since it is assumed to be the 'Error' argument <ide> // and thus intercepted by the domain. <ide> <del> // if this throws, it will also be passed to the domain <add> // If this throws, it will also be passed to the domain <ide> // so the error-handling logic can be moved to the 'error' <ide> // event on the domain instead of being repeated throughout <ide> // the program. <ide> function readSomeFile(filename, cb) { <ide> } <ide> <ide> d.on('error', (er) => { <del> // an error occurred somewhere. <del> // if we throw it now, it will crash the program <add> // An error occurred somewhere. If we throw it now, it will crash the program <ide> // with the normal line number and stack message. <ide> }); <ide> ``` <ide><path>doc/api/errors.md <ide> program. <ide> try { <ide> require('vm').runInThisContext('binary ! isNotOk'); <ide> } catch (err) { <del> // err will be a SyntaxError <add> // 'err' will be a SyntaxError. <ide> } <ide> ``` <ide> <ide> string would be considered a `TypeError`. <ide> <ide> ```js <ide> require('url').parse(() => { }); <del>// throws TypeError, since it expected a string <add>// Throws TypeError, since it expected a string. 
<ide> ``` <ide> <ide> Node.js will generate and throw `TypeError` instances *immediately* as a form <ide><path>doc/api/events.md <ide> emitter.on('log', () => console.log('log persistently')); <ide> // Will return a new Array with a single function bound by `.on()` above <ide> const newListeners = emitter.rawListeners('log'); <ide> <del>// logs "log persistently" twice <add>// Logs "log persistently" twice <ide> newListeners[0](); <ide> emitter.emit('log'); <ide> ``` <ide><path>doc/api/http.md <ide> const proxy = http.createServer((req, res) => { <ide> res.end('okay'); <ide> }); <ide> proxy.on('connect', (req, cltSocket, head) => { <del> // connect to an origin server <add> // Connect to an origin server <ide> const srvUrl = url.parse(`http://${req.url}`); <ide> const srvSocket = net.connect(srvUrl.port, srvUrl.hostname, () => { <ide> cltSocket.write('HTTP/1.1 200 Connection Established\r\n' + <ide> proxy.on('connect', (req, cltSocket, head) => { <ide> }); <ide> }); <ide> <del>// now that proxy is running <add>// Now that proxy is running <ide> proxy.listen(1337, '127.0.0.1', () => { <ide> <ide> // Make a request to a tunneling proxy <ide> srv.on('upgrade', (req, socket, head) => { <ide> socket.pipe(socket); // echo back <ide> }); <ide> <del>// now that server is running <add>// Now that server is running <ide> srv.listen(1337, '127.0.0.1', () => { <ide> <ide> // make a request <ide> request.setHeader('content-type', 'text/html'); <ide> request.setHeader('Content-Length', Buffer.byteLength(body)); <ide> request.setHeader('Cookie', ['type=ninja', 'language=javascript']); <ide> const contentType = request.getHeader('Content-Type'); <del>// contentType is 'text/html' <add>// 'contentType' is 'text/html' <ide> const contentLength = request.getHeader('Content-Length'); <del>// contentLength is of type number <add>// 'contentLength' is of type number <ide> const cookie = request.getHeader('Cookie'); <del>// cookie is of type string[] <add>// 'cookie' is of type string[] <ide> ``` <ide> <ide> ### request.maxHeadersCount <ide> req.once('response', (res) => { <ide> const ip = req.socket.localAddress; <ide> const port = req.socket.localPort; <ide> console.log(`Your IP address is ${ip} and your source port is ${port}.`); <del> // consume response object <add> // Consume response object <ide> }); <ide> ``` <ide> <ide> req.on('error', (e) => { <ide> console.error(`problem with request: ${e.message}`); <ide> }); <ide> <del>// write data to request body <add>// Write data to request body <ide> req.write(postData); <ide> req.end(); <ide> ``` <ide><path>doc/api/http2.md <ide> const client = http2.connect('http://localhost'); <ide> <ide> client.on('stream', (pushedStream, requestHeaders) => { <ide> pushedStream.on('push', (responseHeaders) => { <del> // process response headers <add> // Process response headers <ide> }); <ide> pushedStream.on('data', (chunk) => { /* handle pushed data */ }); <ide> }); <ide><path>doc/api/process.md <ide> process.on('unhandledRejection', (reason, p) => { <ide> }); <ide> <ide> somePromise.then((res) => { <del> return reportToUser(JSON.pasre(res)); // note the typo (`pasre`) <del>}); // no `.catch()` or `.then()` <add> return reportToUser(JSON.pasre(res)); // Note the typo (`pasre`) <add>}); // No `.catch()` or `.then()` <ide> ``` <ide> <ide> The following will also trigger the `'unhandledRejection'` event to be <ide><path>doc/api/querystring.md <ide> querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); <ide> // Returns 'foo=bar&baz=qux&baz=quux&corge=' <ide> 
<ide> querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); <del>// returns 'foo:bar;baz:qux' <add>// Returns 'foo:bar;baz:qux' <ide> ``` <ide> <ide> By default, characters requiring percent-encoding within the query string will <ide><path>doc/api/stream.md <ide> pass.unpipe(writable); <ide> // readableFlowing is now false <ide> <ide> pass.on('data', (chunk) => { console.log(chunk.toString()); }); <del>pass.write('ok'); // will not emit 'data' <add>pass.write('ok'); // Will not emit 'data' <ide> pass.resume(); // Must be called to make stream emit 'data' <ide> ``` <ide> <ide> function parseHeader(stream, callback) { <ide> while (null !== (chunk = stream.read())) { <ide> const str = decoder.write(chunk); <ide> if (str.match(/\n\n/)) { <del> // found the header boundary <add> // Found the header boundary <ide> const split = str.split(/\n\n/); <ide> header += split.shift(); <ide> const remaining = split.join('\n\n'); <ide> function parseHeader(stream, callback) { <ide> // Now the body of the message can be read from the stream. <ide> callback(null, header, stream); <ide> } else { <del> // still reading the header. <add> // Still reading the header. <ide> header += str; <ide> } <ide> } <ide><path>doc/api/timers.md <ide> setImmediatePromise('foobar').then((value) => { <ide> // This is executed after all I/O callbacks. <ide> }); <ide> <del>// or with async function <add>// Or with async function <ide> async function timerExample() { <ide> console.log('Before I/O callbacks'); <ide> await setImmediatePromise(); <ide><path>doc/api/util.md <ide> option properties directly is also supported. <ide> const util = require('util'); <ide> const arr = Array(101).fill(0); <ide> <del>console.log(arr); // logs the truncated array <add>console.log(arr); // Logs the truncated array <ide> util.inspect.defaultOptions.maxArrayLength = null; <ide> console.log(arr); // logs the full array <ide> ``` <ide><path>doc/api/zlib.md <ide> See [Memory Usage Tuning][] for more information on the speed/memory/compression <ide> tradeoffs involved in `zlib` usage. <ide> <ide> ```js <del>// client request example <add>// Client request example <ide> const zlib = require('zlib'); <ide> const http = require('http'); <ide> const fs = require('fs'); <ide><path>lib/_http_server.js <ide> function writeHead(statusCode, reason, obj) { <ide> if (k === undefined && this._header) { <ide> throw new ERR_HTTP_HEADERS_SENT('render'); <ide> } <del> // only progressive api is used <add> // Only progressive api is used <ide> headers = this[outHeadersKey]; <ide> } else { <del> // only writeHead() called <add> // Only writeHead() called <ide> headers = obj; <ide> } <ide> <ide> function socketOnTimeout() { <ide> <ide> function socketOnClose(socket, state) { <ide> debug('server socket close'); <del> // mark this parser as reusable <add> // Mark this parser as reusable <ide> if (socket.parser) { <ide> freeParser(socket.parser, null, socket); <ide> } <ide> function abortIncoming(incoming) { <ide> req.emit('aborted'); <ide> req.emit('close'); <ide> } <del> // abort socket._httpMessage ? <add> // Abort socket._httpMessage ? <ide> } <ide> <ide> function socketOnEnd(server, socket, parser, state) { <ide><path>lib/_stream_duplex.js <ide> Object.defineProperty(Duplex.prototype, 'writableLength', { <ide> } <ide> }); <ide> <del>// the no-half-open enforcer <add>// The no-half-open enforcer <ide> function onend() { <ide> // If the writable side ended, then we're ok. 
<ide> if (this._writableState.ended) <ide> return; <ide> <del> // no more data can be written. <add> // No more data can be written. <ide> // But allow more writes to happen in this tick. <ide> process.nextTick(onEndNT, this); <ide> } <ide><path>lib/_stream_readable.js <ide> function ReadableState(options, stream, isDuplex) { <ide> // Should .destroy() be called after 'end' (and potentially 'finish') <ide> this.autoDestroy = !!options.autoDestroy; <ide> <del> // has it been destroyed <add> // Has it been destroyed <ide> this.destroyed = false; <ide> <ide> // Crypto is kind of old and crusty. Historically, its default string <ide> function addChunk(stream, state, chunk, addToFront) { <ide> state.awaitDrain = 0; <ide> stream.emit('data', chunk); <ide> } else { <del> // update the buffer info. <add> // Update the buffer info. <ide> state.length += state.objectMode ? 1 : chunk.length; <ide> if (addToFront) <ide> state.buffer.unshift(chunk); <ide> Readable.prototype.isPaused = function() { <ide> return this._readableState.flowing === false; <ide> }; <ide> <del>// backwards compatibility. <add>// Backwards compatibility. <ide> Readable.prototype.setEncoding = function(enc) { <ide> if (!StringDecoder) <ide> StringDecoder = require('string_decoder').StringDecoder; <ide> Readable.prototype.read = function(n) { <ide> // If the length is currently zero, then we *need* a readable event. <ide> if (state.length === 0) <ide> state.needReadable = true; <del> // call internal read method <add> // Call internal read method <ide> this._read(state.highWaterMark); <ide> state.sync = false; <ide> // If _read pushed data synchronously, then `reading` will be false, <ide> Readable.prototype.unpipe = function(dest) { <ide> return this; <ide> } <ide> <del> // try to find the right one. <add> // Try to find the right one. <ide> var index = state.pipes.indexOf(dest); <ide> if (index === -1) <ide> return this; <ide> Readable.prototype.wrap = function(stream) { <ide> } <ide> }); <ide> <del> // proxy all the other methods. <del> // important when wrapping filters and duplexes. <add> // Proxy all the other methods. Important when wrapping filters and duplexes. <ide> for (var i in stream) { <ide> if (this[i] === undefined && typeof stream[i] === 'function') { <ide> this[i] = function methodWrap(method) { <ide><path>lib/_stream_writable.js <ide> function WritableState(options, stream, isDuplex) { <ide> <ide> // drain event flag. <ide> this.needDrain = false; <del> // at the start of calling end() <add> // At the start of calling end() <ide> this.ending = false; <ide> // When end() has been called, and returned <ide> this.ended = false; <del> // when 'finish' is emitted <add> // When 'finish' is emitted <ide> this.finished = false; <ide> <del> // has it been destroyed <add> // Has it been destroyed <ide> this.destroyed = false; <ide> <ide> // Should we decode strings into buffers before passing to _write? <ide> function WritableState(options, stream, isDuplex) { <ide> // Should .destroy() be called after 'finish' (and potentially 'end') <ide> this.autoDestroy = !!options.autoDestroy; <ide> <del> // count buffered requests <add> // Count buffered requests <ide> this.bufferedRequestCount = 0; <ide> <ide> // Allocate the first CorkedRequest, there is always <ide> function onCorkedFinish(corkReq, state, err) { <ide> entry = entry.next; <ide> } <ide> <del> // reuse the free corkReq. <add> // Reuse the free corkReq. 
<ide> state.corkedRequestsFree.next = corkReq; <ide> } <ide> <ide><path>lib/buffer.js <ide> function fromArrayLike(obj) { <ide> } <ide> <ide> function fromArrayBuffer(obj, byteOffset, length) { <del> // convert byteOffset to integer <add> // Convert byteOffset to integer <ide> if (byteOffset === undefined) { <ide> byteOffset = 0; <ide> } else { <ide><path>lib/dgram.js <ide> Socket.prototype.bind = function(port_, address_ /* , callback */) { <ide> address = '::'; <ide> } <ide> <del> // resolve address first <add> // Resolve address first <ide> state.handle.lookup(address, (err, ip) => { <ide> if (err) { <ide> state.bindState = BIND_STATE_UNBOUND; <ide><path>lib/domain.js <ide> const asyncHook = createHook({ <ide> }, <ide> before(asyncId) { <ide> const current = pairing.get(asyncId); <del> if (current !== undefined) { // enter domain for this cb <add> if (current !== undefined) { // Enter domain for this cb <ide> // We will get the domain through current.get(), because the resource <ide> // object's .domain property makes sure it is not garbage collected. <ide> current.get().enter(); <ide> } <ide> }, <ide> after(asyncId) { <ide> const current = pairing.get(asyncId); <del> if (current !== undefined) { // exit domain for this cb <add> if (current !== undefined) { // Exit domain for this cb <ide> current.get().exit(); <ide> } <ide> }, <ide><path>lib/fs.js <ide> function handleErrorFromBinding(ctx) { <ide> Error.captureStackTrace(err, handleErrorFromBinding); <ide> throw err; <ide> } <del> if (ctx.error !== undefined) { // errors created in C++ land. <add> if (ctx.error !== undefined) { // Errors created in C++ land. <ide> // TODO(joyeecheung): currently, ctx.error are encoding errors <ide> // usually caused by memory problems. We need to figure out proper error <ide> // code(s) for this. <ide> function readFile(path, options, callback) { <ide> if (!ReadFileContext) <ide> ReadFileContext = require('internal/fs/read_file_context'); <ide> const context = new ReadFileContext(callback, options.encoding); <del> context.isUserFd = isFd(path); // file descriptor ownership <add> context.isUserFd = isFd(path); // File descriptor ownership <ide> <ide> const req = new FSReqCallback(); <ide> req.context = context; <ide> function tryReadSync(fd, isUserFd, buffer, pos, len) { <ide> <ide> function readFileSync(path, options) { <ide> options = getOptions(options, { flag: 'r' }); <del> const isUserFd = isFd(path); // file descriptor ownership <add> const isUserFd = isFd(path); // File descriptor ownership <ide> const fd = isUserFd ? path : fs.openSync(path, options.flag, 0o666); <ide> <ide> const stats = tryStatSync(fd, isUserFd); <ide> const size = isFileType(stats, S_IFREG) ? stats[8] : 0; <ide> let pos = 0; <del> let buffer; // single buffer with file data <del> let buffers; // list for when size is unknown <add> let buffer; // Single buffer with file data <add> let buffers; // List for when size is unknown <ide> <ide> if (size === 0) { <ide> buffers = []; <ide> function writeFileSync(path, data, options) { <ide> options = getOptions(options, { encoding: 'utf8', mode: 0o666, flag: 'w' }); <ide> const flag = options.flag || 'w'; <ide> <del> const isUserFd = isFd(path); // file descriptor ownership <add> const isUserFd = isFd(path); // File descriptor ownership <ide> const fd = isUserFd ? 
path : fs.openSync(path, flag, options.mode); <ide> <ide> if (!isArrayBufferView(data)) { <ide><path>lib/internal/child_process.js <ide> function setupChannel(target, channel) { <ide> // Object where socket lists will live <ide> channel.sockets = { got: {}, send: {} }; <ide> <del> // handlers will go through this <add> // Handlers will go through this <ide> target.on('internalMessage', function(message, handle) { <ide> // Once acknowledged - continue sending handles. <ide> if (message.cmd === 'NODE_HANDLE_ACK' || <ide><path>lib/internal/freeze_intrinsics.js <ide> // limitations under the License. <ide> // SPDX-License-Identifier: MIT <ide> <del>// based upon: <add>// Based upon: <ide> // https://github.com/google/caja/blob/master/src/com/google/caja/ses/startSES.js <ide> // https://github.com/google/caja/blob/master/src/com/google/caja/ses/repairES5.js <ide> // https://github.com/tc39/proposal-frozen-realms/blob/91ac390e3451da92b5c27e354b39e52b7636a437/shim/src/deep-freeze.js <ide> module.exports = function() { <ide> // NB: handle for any new cases in future <ide> } <ide> if (frozenSet.has(val) || freezingSet.has(val)) { <del> // todo use uncurried form <add> // TODO: Use uncurried form <ide> // Ignore if already frozen or freezing <ide> return; <ide> } <del> freezingSet.add(val); // todo use uncurried form <add> freezingSet.add(val); // TODO: Use uncurried form <ide> } <ide> <ide> function doFreeze(obj) { <ide> module.exports = function() { <ide> const descs = getOwnPropertyDescriptors(obj); <ide> enqueue(proto); <ide> ownKeys(descs).forEach((name) => { <del> // todo uncurried form <del> // todo: getOwnPropertyDescriptors is guaranteed to return well-formed <add> // TODO: Uncurried form <add> // TODO: getOwnPropertyDescriptors is guaranteed to return well-formed <ide> // descriptors, but they still inherit from Object.prototype. If <ide> // someone has poisoned Object.prototype to add 'value' or 'get' <ide> // properties, then a simple 'if ("value" in desc)' or 'desc.value' <ide> module.exports = function() { <ide> <ide> function dequeue() { <ide> // New values added before forEach() has finished will be visited. <del> freezingSet.forEach(doFreeze); // todo curried forEach <add> freezingSet.forEach(doFreeze); // TODO: Curried forEach <ide> } <ide> <ide> function commit() { <del> // todo curried forEach <del> // we capture the real WeakSet.prototype.add above, in case someone <add> // TODO: Curried forEach <add> // We capture the real WeakSet.prototype.add above, in case someone <ide> // changes it. The two-argument form of forEach passes the second <ide> // argument as the 'this' binding, so we add to the correct set. <ide> freezingSet.forEach(frozenSet.add, frozenSet); <ide><path>lib/internal/fs/streams.js <ide> ReadStream.prototype.open = function() { <ide> this.fd = fd; <ide> this.emit('open', fd); <ide> this.emit('ready'); <del> // start the flow of data. <add> // Start the flow of data. <ide> this.read(); <ide> }); <ide> }; <ide> ReadStream.prototype._read = function(n) { <ide> return; <ide> <ide> if (!pool || pool.length - pool.used < kMinPoolSpace) { <del> // discard the old pool. <add> // Discard the old pool. 
<ide> allocNewPool(this.readableHighWaterMark); <ide> } <ide> <ide><path>lib/internal/modules/cjs/loader.js <ide> Module.prototype.load = function(filename) { <ide> const module = ESMLoader.moduleMap.get(url); <ide> // Create module entry at load time to snapshot exports correctly <ide> const exports = this.exports; <del> if (module !== undefined) { // called from cjs translator <add> if (module !== undefined) { // Called from cjs translator <ide> module.reflect.onReady((reflect) => { <ide> reflect.exports.default.set(exports); <ide> }); <ide> if (experimentalModules) { <ide> }; <ide> } <ide> <del>// bootstrap main module. <add>// Bootstrap main module. <ide> Module.runMain = function() { <ide> // Load the main module--the command line argument. <ide> if (experimentalModules) { <ide><path>lib/internal/modules/esm/translators.js <ide> translators.set('cjs', async (url, isMain) => { <ide> // through normal resolution <ide> translators.set('builtin', async (url) => { <ide> debug(`Translating BuiltinModule ${url}`); <del> // slice 'node:' scheme <add> // Slice 'node:' scheme <ide> const id = url.slice(5); <ide> const module = NativeModule.map.get(id); <ide> if (!module) { <ide><path>lib/internal/process/per_thread.js <ide> function wrapProcessMethods(binding) { <ide> throw new ERR_INVALID_ARG_TYPE('pid', 'number', pid); <ide> } <ide> <del> // preserve null signal <add> // Preserve null signal <ide> if (sig === (sig | 0)) { <ide> // XXX(joyeecheung): we have to use process._kill here because <ide> // it's monkey-patched by tests. <ide> function buildAllowedFlags() { <ide> } <ide> <ide> delete() { <del> // noop, `Set` API compatible <add> // No-op, `Set` API compatible <ide> return false; <ide> } <ide> <ide> clear() { <del> // noop <add> // No-op <ide> } <ide> <ide> has(key) { <ide><path>lib/internal/readline.js <ide> function* emitKeys(stream) { <ide> } <ide> <ide> if (escaped && (ch === 'O' || ch === '[')) { <del> // ansi escape sequence <add> // ANSI escape sequence <ide> let code = ch; <ide> let modifier = 0; <ide> <ide> function* emitKeys(stream) { <ide> key.name = String.fromCharCode(ch.charCodeAt(0) + 'a'.charCodeAt(0) - 1); <ide> key.ctrl = true; <ide> } else if (/^[0-9A-Za-z]$/.test(ch)) { <del> // letter, number, shift+letter <add> // Letter, number, shift+letter <ide> key.name = ch.toLowerCase(); <ide> key.shift = /^[A-Z]$/.test(ch); <ide> key.meta = escaped; <ide><path>lib/internal/streams/async_iterator.js <ide> function readAndResolve(iter) { <ide> const resolve = iter[kLastResolve]; <ide> if (resolve !== null) { <ide> const data = iter[kStream].read(); <del> // we defer if data is null <del> // we can be expecting either 'end' or <del> // 'error' <add> // We defer if data is null. We can be expecting either 'end' or 'error'. <ide> if (data !== null) { <ide> iter[kLastPromise] = null; <ide> iter[kLastResolve] = null; <ide> function readAndResolve(iter) { <ide> <ide> function onReadable(iter) { <ide> // We wait for the next tick, because it might <del> // emit an error with process.nextTick <add> // emit an error with `process.nextTick()`. <ide> process.nextTick(readAndResolve, iter); <ide> } <ide> <ide> const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ <ide> <ide> next() { <ide> // If we have detected an error in the meanwhile <del> // reject straight away <add> // reject straight away. 
<ide> const error = this[kError]; <ide> if (error !== null) { <ide> return Promise.reject(error); <ide> const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ <ide> }, <ide> <ide> return() { <del> // destroy(err, cb) is a private API <del> // we can guarantee we have that here, because we control the <del> // Readable class this is attached to <add> // destroy(err, cb) is a private API. <add> // We can guarantee we have that here, because we control the <add> // Readable class this is attached to. <ide> return new Promise((resolve, reject) => { <ide> this[kStream].destroy(null, (err) => { <ide> if (err) { <ide> const createReadableStreamAsyncIterator = (stream) => { <ide> value: stream._readableState.endEmitted, <ide> writable: true <ide> }, <del> // The function passed to new Promise <del> // is cached so we avoid allocating a new <del> // closure at every run <add> // The function passed to new Promise is cached so we avoid allocating a new <add> // closure at every run. <ide> [kHandlePromise]: { <ide> value: (resolve, reject) => { <ide> const data = iterator[kStream].read(); <ide> const createReadableStreamAsyncIterator = (stream) => { <ide> finished(stream, (err) => { <ide> if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { <ide> const reject = iterator[kLastReject]; <del> // Reject if we are waiting for data in the Promise <del> // returned by next() and store the error <add> // Reject if we are waiting for data in the Promise returned by next() and <add> // store the error. <ide> if (reject !== null) { <ide> iterator[kLastPromise] = null; <ide> iterator[kLastResolve] = null; <ide><path>lib/internal/timers.js <ide> function initAsyncResource(resource, type) { <ide> // Timer constructor function. <ide> // The entire prototype is defined in lib/timers.js <ide> function Timeout(callback, after, args, isRepeat) { <del> after *= 1; // coalesce to number or NaN <add> after *= 1; // Coalesce to number or NaN <ide> if (!(after >= 1 && after <= TIMEOUT_MAX)) { <ide> if (after > TIMEOUT_MAX) { <ide> process.emitWarning(`${after} does not fit into` + <ide><path>lib/internal/trace_events_async_hooks.js <ide> function createHook() { <ide> <ide> trace(kEndEvent, kTraceEventCategory, type, asyncId); <ide> <del> // cleanup asyncId to type map <add> // Cleanup asyncId to type map <ide> typeMemory.delete(asyncId); <ide> } <ide> }); <ide><path>lib/internal/url.js <ide> class URLSearchParams { <ide> throw new ERR_ARG_NOT_ITERABLE('Query pairs'); <ide> } <ide> <del> // sequence<sequence<USVString>> <add> // Sequence<sequence<USVString>> <ide> // Note: per spec we have to first exhaust the lists then process them <ide> const pairs = []; <ide> for (const pair of init) { <ide> class URLSearchParams { <ide> this[searchParams].push(pair[0], pair[1]); <ide> } <ide> } else { <del> // record<USVString, USVString> <add> // Record<USVString, USVString> <ide> // Need to use reflection APIs for full spec compliance. <ide> this[searchParams] = []; <ide> const keys = Reflect.ownKeys(init); <ide> function onParseComplete(flags, protocol, username, password, <ide> ctx.query = query; <ide> ctx.fragment = fragment; <ide> ctx.host = host; <del> if (!this[searchParams]) { // invoked from URL constructor <add> if (!this[searchParams]) { // Invoked from URL constructor <ide> this[searchParams] = new URLSearchParams(); <ide> this[searchParams][context] = this; <ide> } <ide> Object.defineProperties(URL.prototype, { <ide> try { <ide> return (new URL(ctx.path[0])).origin; <ide> } catch { <del> // fall through... 
do nothing <add> // Fall through... do nothing <ide> } <ide> } <ide> return kOpaqueOrigin; <ide><path>lib/net.js <ide> function emitErrorNT(self, err) { <ide> <ide> <ide> function emitListeningNT(self) { <del> // ensure handle hasn't closed <add> // Ensure handle hasn't closed <ide> if (self._handle) <ide> self.emit('listening'); <ide> } <ide><path>lib/readline.js <ide> function Interface(input, output, completer, terminal) { <ide> let prompt = '> '; <ide> <ide> if (input && input.input) { <del> // an options object was given <add> // An options object was given <ide> output = input.output; <ide> completer = input.completer; <ide> terminal = input.terminal; <ide> function Interface(input, output, completer, terminal) { <ide> <ide> emitKeypressEvents(input, this); <ide> <del> // input usually refers to stdin <add> // `input` usually refers to stdin <ide> input.on('keypress', onkeypress); <ide> input.on('end', ontermend); <ide> <ide> Interface.prototype._tabComplete = function(lastKeypressWasTab) { <ide> } <ide> <ide> const completions = rv[0]; <del> const completeOn = rv[1]; // the text that was completed <add> const completeOn = rv[1]; // The text that was completed <ide> if (completions && completions.length) { <ide> // Apply/show completions. <ide> if (lastKeypressWasTab) { <ide> Interface.prototype._historyNext = function() { <ide> if (this.historyIndex > 0) { <ide> this.historyIndex--; <ide> this.line = this.history[this.historyIndex]; <del> this.cursor = this.line.length; // set cursor to end of line. <add> this.cursor = this.line.length; // Set cursor to end of line. <ide> this._refreshLine(); <ide> <ide> } else if (this.historyIndex === 0) { <ide> Interface.prototype._historyPrev = function() { <ide> if (this.historyIndex + 1 < this.history.length) { <ide> this.historyIndex++; <ide> this.line = this.history[this.historyIndex]; <del> this.cursor = this.line.length; // set cursor to end of line. <add> this.cursor = this.line.length; // Set cursor to end of line. <ide> <ide> this._refreshLine(); <ide> } <ide> function _ttyWriteDumb(s, key) { <ide> } <ide> <ide> switch (key.name) { <del> case 'return': // carriage return, i.e. \r <add> case 'return': // Carriage return, i.e. 
\r <ide> this._sawReturnAt = Date.now(); <ide> this._line(); <ide> break; <ide> function _ttyWriteDumb(s, key) { <ide> } <ide> } <ide> <del>// handle a write from the tty <add>// Handle a write from the tty <ide> Interface.prototype._ttyWrite = function(s, key) { <ide> const previousKey = this._previousKey; <ide> key = key || {}; <ide> Interface.prototype._ttyWrite = function(s, key) { <ide> this._deleteLineRight(); <ide> break; <ide> <del> case 'a': // go to the start of the line <add> case 'a': // Go to the start of the line <ide> this._moveCursor(-Infinity); <ide> break; <ide> <del> case 'e': // go to the end of the line <add> case 'e': // Go to the end of the line <ide> this._moveCursor(+Infinity); <ide> break; <ide> <ide> case 'b': // back one character <ide> this._moveCursor(-charLengthLeft(this.line, this.cursor)); <ide> break; <ide> <del> case 'f': // forward one character <add> case 'f': // Forward one character <ide> this._moveCursor(+charLengthAt(this.line, this.cursor)); <ide> break; <ide> <del> case 'l': // clear the whole screen <add> case 'l': // Clear the whole screen <ide> cursorTo(this.output, 0, 0); <ide> clearScreenDown(this.output); <ide> this._refreshLine(); <ide> Interface.prototype._ttyWrite = function(s, key) { <ide> this._historyNext(); <ide> break; <ide> <del> case 'p': // previous history item <add> case 'p': // Previous history item <ide> this._historyPrev(); <ide> break; <ide> <ide> Interface.prototype._ttyWrite = function(s, key) { <ide> this._sawReturnAt = 0; <ide> <ide> switch (key.name) { <del> case 'return': // carriage return, i.e. \r <add> case 'return': // Carriage return, i.e. \r <ide> this._sawReturnAt = Date.now(); <ide> this._line(); <ide> break; <ide><path>lib/repl.js <ide> function complete(line, callback) { <ide> if (kill.isFunction) <ide> tmp[kill.line] = ''; <ide> } <del> var flat = new ArrayStream(); // make a new "input" stream <del> var magic = new REPLServer('', flat); // make a nested REPL <add> var flat = new ArrayStream(); // Make a new "input" stream. <add> var magic = new REPLServer('', flat); // Make a nested REPL. <ide> replMap.set(magic, replMap.get(this)); <del> flat.run(tmp); // eval the flattened code <del> // all this is only profitable if the nested REPL <del> // does not have a bufferedCommand <add> flat.run(tmp); // `eval` the flattened code. <add> // All this is only profitable if the nested REPL does not have a <add> // bufferedCommand. <ide> if (!magic[kBufferedCommandSymbol]) { <ide> magic._domain.on('error', (err) => { throw err; }); <ide> return magic.complete(line, callback); <ide> function complete(line, callback) { <ide> // https://github.com/nodejs/node/issues/2119 <ide> } <ide> } <del> // works for non-objects <add> // Works for non-objects <ide> try { <ide> var sentinel = 5; <ide> var p; <ide><path>lib/url.js <ide> Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { <ide> rest = rest.slice(proto.length); <ide> } <ide> <del> // figure out if it's got a host <add> // Figure out if it's got a host <ide> // user@server is *always* interpreted as a hostname, and url <ide> // resolution will treat //foo/bar as host=foo,path=bar because that's <ide> // how the browser resolves relative URLs. 
<ide> Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { <ide> this.pathname = '/'; <ide> } <ide> <del> // to support http.request <add> // To support http.request <ide> if (this.pathname || this.search) { <ide> const p = this.pathname || ''; <ide> const s = this.search || ''; <ide> Url.prototype.resolveObject = function resolveObject(relative) { <ide> result.auth = relative.auth; <ide> result.hostname = relative.hostname || relative.host; <ide> result.port = relative.port; <del> // to support http.request <add> // To support http.request <ide> if (result.pathname || result.search) { <ide> var p = result.pathname || ''; <ide> var s = result.search || ''; <ide> Url.prototype.resolveObject = function resolveObject(relative) { <ide> result.search = relative.search; <ide> result.query = relative.query; <ide> } else if (relative.search !== null && relative.search !== undefined) { <del> // just pull out the search. <add> // Just pull out the search. <ide> // like href='?foo'. <ide> // Put this after the other two cases because it simplifies the booleans <ide> if (noLeadingSlashes) { <ide> Url.prototype.resolveObject = function resolveObject(relative) { <ide> } <ide> <ide> if (!srcPath.length) { <del> // no path at all. easy. <del> // we've already handled the other stuff above. <add> // No path at all. All other things were already handled above. <ide> result.pathname = null; <ide> // To support http.request <ide> if (result.search) { <ide><path>lib/zlib.js <ide> function processCallback() { <ide> self.push(null); <ide> } <ide> <del> // finished with the chunk. <add> // Finished with the chunk. <ide> this.buffer = null; <ide> this.cb(); <ide> } <ide><path>test/addons/openssl-client-cert-engine/test.js <ide> const server = https.createServer(serverOptions, (req, res) => { <ide> host: common.localhostIPv4, <ide> port: server.address().port, <ide> path: '/test', <del> clientCertEngine: engine, // engine will provide key+cert <add> clientCertEngine: engine, // `engine` will provide key+cert <ide> rejectUnauthorized: false, // Prevent failing on self-signed certificates <ide> headers: {} <ide> }; <ide><path>test/async-hooks/test-async-await.js <ide> const initHooks = require('./init-hooks'); <ide> const util = require('util'); <ide> <ide> const sleep = util.promisify(setTimeout); <del>// either 'inited' or 'resolved' <add>// Either 'inited' or 'resolved' <ide> const promisesInitState = new Map(); <ide> // Either 'before' or 'after' AND asyncId must be present in the other map <ide> const promisesExecutionState = new Map(); <ide><path>test/async-hooks/test-graph.signal.js <ide> const { exec } = require('child_process'); <ide> const hooks = initHooks(); <ide> <ide> hooks.enable(); <del>const interval = setInterval(() => {}, 9999); // keep event loop open <add>const interval = setInterval(() => {}, 9999); // Keep event loop open <ide> process.on('SIGUSR2', common.mustCall(onsigusr2, 2)); <ide> <ide> let count = 0; <ide> function onsigusr2() { <ide> } <ide> <ide> function onsigusr2Again() { <del> clearInterval(interval); // let the event loop close <add> clearInterval(interval); // Let the event loop close <ide> } <ide> <ide> process.on('exit', onexit); <ide><path>test/async-hooks/test-graph.statwatcher.js <ide> const hooks = initHooks(); <ide> hooks.enable(); <ide> <ide> function onchange() { } <del>// install first file watcher <add>// Install first file watcher <ide> fs.watchFile(__filename, onchange); <ide> <del>// install second file watcher <add>// Install second file 
watcher <ide> fs.watchFile(commonPath, onchange); <ide> <del>// remove first file watcher <add>// Remove first file watcher <ide> fs.unwatchFile(__filename); <ide> <del>// remove second file watcher <add>// Remove second file watcher <ide> fs.unwatchFile(commonPath); <ide> <ide> process.on('exit', onexit); <ide><path>test/async-hooks/test-immediate.js <ide> const { checkInvocations } = require('./hook-checks'); <ide> const hooks = initHooks(); <ide> hooks.enable(); <ide> <del>// install first immediate <add>// Install first immediate <ide> setImmediate(common.mustCall(onimmediate)); <ide> <ide> const as = hooks.activitiesOfTypes('Immediate'); <ide> function onimmediate() { <ide> checkInvocations(imd1, { init: 1, before: 1 }, <ide> 'imd1: when first set immediate triggered'); <ide> <del> // install second immediate <add> // Install second immediate <ide> setImmediate(common.mustCall(onimmediateTwo)); <ide> as = hooks.activitiesOfTypes('Immediate'); <ide> assert.strictEqual(as.length, 2); <ide><path>test/async-hooks/test-statwatcher.js <ide> const hooks = initHooks(); <ide> hooks.enable(); <ide> <ide> function onchange() {} <del>// install first file watcher <add>// Install first file watcher <ide> const w1 = fs.watchFile(file1, { interval: 10 }, onchange); <ide> <ide> let as = hooks.activitiesOfTypes('STATWATCHER'); <ide> assert.strictEqual(statwatcher1.triggerAsyncId, 1); <ide> checkInvocations(statwatcher1, { init: 1 }, <ide> 'watcher1: when started to watch file'); <ide> <del>// install second file watcher <add>// Install second file watcher <ide> const w2 = fs.watchFile(file2, { interval: 10 }, onchange); <ide> as = hooks.activitiesOfTypes('STATWATCHER'); <ide> assert.strictEqual(as.length, 2); <ide><path>test/async-hooks/test-timers.setTimeout.js <ide> const TIMEOUT = common.platformTimeout(100); <ide> const hooks = initHooks(); <ide> hooks.enable(); <ide> <del>// install first timeout <add>// Install first timeout <ide> setTimeout(common.mustCall(ontimeout), TIMEOUT); <ide> const as = hooks.activitiesOfTypes('Timeout'); <ide> assert.strictEqual(as.length, 1); <ide><path>test/async-hooks/test-ttywrap.readstream.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> <del>// general hook test setup <add>// General hook test setup <ide> const tick = require('../common/tick'); <ide> const initHooks = require('./init-hooks'); <ide> const { checkInvocations } = require('./hook-checks'); <ide><path>test/async-hooks/test-ttywrap.writestream.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> <del>// general hook test setup <add>// General hook test setup <ide> const tick = require('../common/tick'); <ide> const initHooks = require('./init-hooks'); <ide> const { checkInvocations } = require('./hook-checks'); <ide><path>test/common/index.js <ide> module.exports = { <ide> if (opensslCli !== null) return opensslCli; <ide> <ide> if (process.config.variables.node_shared_openssl) { <del> // use external command <add> // Use external command <ide> opensslCli = 'openssl'; <ide> } else { <ide> // Use command built from sources included in Node.js repository <ide><path>test/es-module/test-esm-preserve-symlinks-main.js <ide> function doTest(flags, done) { <ide> <ide> // First test the commonjs module loader <ide> doTest([], () => { <del> // now test the new loader <add> // Now test the new loader <ide> doTest(['--experimental-modules'], () => {}); <ide> }); <ide><path>test/js-native-api/test_general/test.js <ide> class 
ExtendedClass extends BaseClass { <ide> const baseObject = new BaseClass(); <ide> const extendedObject = new ExtendedClass(); <ide> <del>// test napi_strict_equals <add>// Test napi_strict_equals <ide> assert.ok(test_general.testStrictEquals(val1, val1)); <ide> assert.strictEqual(test_general.testStrictEquals(val1, val2), false); <ide> assert.ok(test_general.testStrictEquals(val2, val3)); <ide> <del>// test napi_get_prototype <add>// Test napi_get_prototype <ide> assert.strictEqual(test_general.testGetPrototype(baseObject), <ide> Object.getPrototypeOf(baseObject)); <ide> assert.strictEqual(test_general.testGetPrototype(extendedObject), <ide><path>test/js-native-api/test_number/test.js <ide> const assert = require('assert'); <ide> const test_number = require(`./build/${common.buildType}/test_number`); <ide> <ide> <del>// testing api calls for number <add>// Testing api calls for number <ide> function testNumber(num) { <ide> assert.strictEqual(num, test_number.Test(num)); <ide> } <ide><path>test/js-native-api/test_string/test.js <ide> const common = require('../../common'); <ide> const assert = require('assert'); <ide> <del>// testing api calls for string <add>// Testing api calls for string <ide> const test_string = require(`./build/${common.buildType}/test_string`); <ide> <ide> const empty = ''; <ide><path>test/js-native-api/test_symbol/test1.js <ide> const common = require('../../common'); <ide> const assert = require('assert'); <ide> <del>// testing api calls for symbol <add>// Testing api calls for symbol <ide> const test_symbol = require(`./build/${common.buildType}/test_symbol`); <ide> <ide> const sym = test_symbol.New('test'); <ide><path>test/js-native-api/test_symbol/test2.js <ide> const common = require('../../common'); <ide> const assert = require('assert'); <ide> <del>// testing api calls for symbol <add>// Testing api calls for symbol <ide> const test_symbol = require(`./build/${common.buildType}/test_symbol`); <ide> <ide> const fooSym = test_symbol.New('foo'); <ide><path>test/js-native-api/test_symbol/test3.js <ide> const common = require('../../common'); <ide> const assert = require('assert'); <ide> <del>// testing api calls for symbol <add>// Testing api calls for symbol <ide> const test_symbol = require(`./build/${common.buildType}/test_symbol`); <ide> <ide> assert.notStrictEqual(test_symbol.New(), test_symbol.New()); <ide><path>test/known_issues/test-vm-ownkeys.js <ide> const ctx = vm.createContext(sandbox); <ide> const nativeKeys = vm.runInNewContext('Reflect.ownKeys(this);'); <ide> const ownKeys = vm.runInContext('Reflect.ownKeys(this);', ctx); <ide> const restKeys = ownKeys.filter((key) => !nativeKeys.includes(key)); <del>// this should not fail <add>// This should not fail <ide> assert.deepStrictEqual(Array.from(restKeys), ['a', 'b', sym1, sym2]); <ide><path>test/known_issues/test-vm-ownpropertynames.js <ide> const ctx = vm.createContext(sandbox); <ide> const nativeNames = vm.runInNewContext('Object.getOwnPropertyNames(this);'); <ide> const ownNames = vm.runInContext('Object.getOwnPropertyNames(this);', ctx); <ide> const restNames = ownNames.filter((name) => !nativeNames.includes(name)); <del>// this should not fail <add>// This should not fail <ide> assert.deepStrictEqual(Array.from(restNames), ['a', 'b']); <ide><path>test/known_issues/test-vm-ownpropertysymbols.js <ide> const ctx = vm.createContext(sandbox); <ide> const nativeSym = vm.runInNewContext('Object.getOwnPropertySymbols(this);'); <ide> const ownSym = vm.runInContext('Object.getOwnPropertySymbols(this);', 
ctx); <ide> const restSym = ownSym.filter((sym) => !nativeSym.includes(sym)); <del>// this should not fail <add>// This should not fail <ide> assert.deepStrictEqual(Array.from(restSym), [sym1, sym2]); <ide><path>test/message/throw_custom_error.js <ide> 'use strict'; <ide> require('../common'); <ide> <del>// custom error throwing <add>// Custom error throwing <ide> // eslint-disable-next-line no-throw-literal <ide> throw ({ name: 'MyCustomError', message: 'This is a custom message' }); <ide><path>test/message/throw_non_error.js <ide> 'use strict'; <ide> require('../common'); <ide> <del>// custom error throwing <add>// Custom error throwing <ide> // eslint-disable-next-line no-throw-literal <ide> throw ({ foo: 'bar' }); <ide><path>test/parallel/test-assert-deep.js <ide> assertNotDeepOrStrict( <ide> new Map([['1', 5], [0, 5], ['0', 5]]) <ide> ); <ide> <del>// undefined value in Map <add>// Undefined value in Map <ide> assertDeepAndStrictEqual( <ide> new Map([[1, undefined]]), <ide> new Map([[1, undefined]]) <ide><path>test/parallel/test-buffer-alloc.js <ide> assert.strictEqual(Buffer.from('A', 'base64').length, 0); <ide> <ide> <ide> { <del> // test an invalid slice end. <add> // Test an invalid slice end. <ide> const b = Buffer.from([1, 2, 3, 4, 5]); <ide> const b2 = b.toString('hex', 1, 10000); <ide> const b3 = b.toString('hex', 1, 5); <ide> assert.strictEqual(x.inspect(), '<Buffer 81 a3 66 6f 6f a3 62 61 72>'); <ide> } <ide> <ide> { <del> // test for buffer overrun <add> // Test for buffer overrun <ide> const buf = Buffer.from([0, 0, 0, 0, 0]); // length: 5 <ide> const sub = buf.slice(0, 4); // length: 4 <ide> assert.strictEqual(sub.write('12345', 'latin1'), 4); <ide> assert.strictEqual(x.inspect(), '<Buffer 81 a3 66 6f 6f a3 62 61 72>'); <ide> } <ide> <ide> { <del> // test alloc with fill option <add> // Test alloc with fill option <ide> const buf = Buffer.alloc(5, '800A', 'hex'); <ide> assert.strictEqual(buf[0], 128); <ide> assert.strictEqual(buf[1], 10); <ide><path>test/parallel/test-buffer-compare-offset.js <ide> assert.strictEqual(a.compare(b, 0, 7, 4, 6), -1); <ide> // zero length target <ide> assert.strictEqual(a.compare(b, 0, null), 1); <ide> <del>// coerces to targetEnd == 5 <add>// Coerces to targetEnd == 5 <ide> assert.strictEqual(a.compare(b, 0, { valueOf: () => 5 }), -1); <ide> <ide> // zero length target <ide><path>test/parallel/test-buffer-copy.js <ide> common.expectsError( <ide> } <ide> } <ide> <del>// throw with negative sourceEnd <add>// Throw with negative sourceEnd <ide> common.expectsError( <ide> () => b.copy(c, 0, -1), errorProperty); <ide> <ide><path>test/parallel/test-buffer-includes.js <ide> assert.strictEqual( <ide> true <ide> ); <ide> <del>// test base64 encoding <add>// Test base64 encoding <ide> assert.strictEqual( <ide> Buffer.from(b.toString('base64'), 'base64') <ide> .includes('ZA==', 0, 'base64'), <ide> assert.strictEqual( <ide> true <ide> ); <ide> <del>// test latin1 encoding <add>// Test latin1 encoding <ide> assert.strictEqual( <ide> Buffer.from(b.toString('latin1'), 'latin1') <ide> .includes('d', 0, 'latin1'), <ide> assert.strictEqual( <ide> true <ide> ); <ide> <del>// test binary encoding <add>// Test binary encoding <ide> assert.strictEqual( <ide> Buffer.from(b.toString('binary'), 'binary') <ide> .includes('d', 0, 'binary'), <ide><path>test/parallel/test-buffer-indexof.js <ide> assert.strictEqual( <ide> 3 <ide> ); <ide> <del>// test base64 encoding <add>// Test base64 encoding <ide> assert.strictEqual( <ide> Buffer.from(b.toString('base64'), 'base64') 
<ide> .indexOf('ZA==', 0, 'base64'), <ide> assert.strictEqual( <ide> 3 <ide> ); <ide> <del>// test latin1 encoding <add>// Test latin1 encoding <ide> assert.strictEqual( <ide> Buffer.from(b.toString('latin1'), 'latin1') <ide> .indexOf('d', 0, 'latin1'), <ide> assert.strictEqual( <ide> 0 <ide> ); <ide> <del>// test binary encoding <add>// Test binary encoding <ide> assert.strictEqual( <ide> Buffer.from(b.toString('binary'), 'binary') <ide> .indexOf('d', 0, 'binary'), <ide><path>test/parallel/test-buffer-iterator.js <ide> const buffer = Buffer.from([1, 2, 3, 4, 5]); <ide> let arr; <ide> let b; <ide> <del>// buffers should be iterable <add>// Buffers should be iterable <ide> <ide> arr = []; <ide> <ide><path>test/parallel/test-buffer-slow.js <ide> const SlowBuffer = buffer.SlowBuffer; <ide> <ide> const ones = [1, 1, 1, 1]; <ide> <del>// should create a Buffer <add>// Should create a Buffer <ide> let sb = SlowBuffer(4); <ide> assert(sb instanceof Buffer); <ide> assert.strictEqual(sb.length, 4); <ide> for (const [key, value] of sb.entries()) { <ide> // underlying ArrayBuffer should have the same length <ide> assert.strictEqual(sb.buffer.byteLength, 4); <ide> <del>// should work without new <add>// Should work without new <ide> sb = SlowBuffer(4); <ide> assert(sb instanceof Buffer); <ide> assert.strictEqual(sb.length, 4); <ide> for (const [key, value] of sb.entries()) { <ide> assert.deepStrictEqual(value, ones[key]); <ide> } <ide> <del>// should work with edge cases <add>// Should work with edge cases <ide> assert.strictEqual(SlowBuffer(0).length, 0); <ide> try { <ide> assert.strictEqual( <ide><path>test/parallel/test-child-process-constructor.js <ide> function typeName(value) { <ide> }); <ide> } <ide> <del>// test that we can call spawn <add>// Test that we can call spawn <ide> const child = new ChildProcess(); <ide> child.spawn({ <ide> file: process.execPath, <ide><path>test/parallel/test-child-process-disconnect.js <ide> if (process.argv[2] === 'child') { <ide> // The process should also self terminate without using signals <ide> child.on('exit', common.mustCall()); <ide> <del> // when child is listening <add> // When child is listening <ide> child.on('message', function(obj) { <ide> if (obj && obj.msg === 'ready') { <ide> <ide> if (process.argv[2] === 'child') { <ide> socket.on('data', function(data) { <ide> data = data.toString(); <ide> <del> // ready to be disconnected <add> // Ready to be disconnected <ide> if (data === 'ready') { <ide> child.disconnect(); <ide> assert.throws( <ide> if (process.argv[2] === 'child') { <ide> return; <ide> } <ide> <del> // disconnect is emitted <add> // 'disconnect' is emitted <ide> childFlag = (data === 'true'); <ide> }); <ide> <ide><path>test/parallel/test-child-process-exec-error.js <ide> function test(fn, code) { <ide> } <ide> <ide> if (common.isWindows) { <del> test(child_process.exec, 1); // exit code of cmd.exe <add> test(child_process.exec, 1); // Exit code of cmd.exe <ide> } else { <del> test(child_process.exec, 127); // exit code of /bin/sh <add> test(child_process.exec, 127); // Exit code of /bin/sh <ide> } <ide> <ide> test(child_process.execFile, 'ENOENT'); <ide><path>test/parallel/test-child-process-fork-net-server.js <ide> if (process.argv[2] === 'child') { <ide> } <ide> <ide> } else if (msg.what === 'connection') { <del> // child got connection <add> // Child got connection <ide> connections.done(); <ide> } else if (msg.what === 'close') { <ide> child.removeListener('message', messageHandlers); 
<ide><path>test/parallel/test-child-process-fork-net.js <ide> if (process.argv[2] === 'child') { <ide> console.error(`[${id}] socket.end ${m}`); <ide> }); <ide> <del> // store the unfinished socket <add> // Store the unfinished socket <ide> if (m === 'write') { <ide> needEnd.push(socket); <ide> } <ide><path>test/parallel/test-child-process-spawnsync-input.js <ide> const spawnSync = require('child_process').spawnSync; <ide> const msgOut = 'this is stdout'; <ide> const msgErr = 'this is stderr'; <ide> <del>// this is actually not os.EOL? <add>// This is actually not os.EOL? <ide> const msgOutBuf = Buffer.from(`${msgOut}\n`); <ide> const msgErrBuf = Buffer.from(`${msgErr}\n`); <ide> <ide><path>test/parallel/test-child-process-stdio-inherit.js <ide> function grandparent() { <ide> } <ide> <ide> function parent() { <del> // should not immediately exit. <add> // Should not immediately exit. <ide> spawn('cat', [], { stdio: 'inherit' }); <ide> } <ide><path>test/parallel/test-cluster-bind-twice.js <ide> if (!id) { <ide> })); <ide> }, 2)); <ide> } else { <del> assert(0); // bad command line argument <add> assert(0); // Bad command line argument <ide> } <ide> <ide> function startWorker() { <ide><path>test/parallel/test-cluster-disconnect.js <ide> if (cluster.isWorker) { <ide> const servers = 2; <ide> const serverPorts = new Set(); <ide> <del> // test a single TCP server <add> // Test a single TCP server <ide> const testConnection = (port, cb) => { <ide> const socket = net.connect(port, '127.0.0.1', () => { <ide> // buffer result <ide><path>test/parallel/test-cluster-eaccess.js <ide> if (cluster.isMaster && process.argv.length !== 3) { <ide> server.on('error', function(err) { <ide> // Message to child process tells it to exit <ide> cp.send('end'); <del> // propagate error to parent <add> // Propagate error to parent <ide> process.send(err); <ide> }); <ide> })); <ide><path>test/parallel/test-cluster-master-kill.js <ide> const cluster = require('cluster'); <ide> <ide> if (cluster.isWorker) { <ide> <del> // keep the worker alive <add> // Keep the worker alive <ide> const http = require('http'); <ide> http.Server().listen(0, '127.0.0.1'); <ide> <ide> if (cluster.isWorker) { <ide> pid: worker.process.pid <ide> }); <ide> <del> // terminate the cluster process <add> // Terminate the cluster process <ide> worker.once('listening', common.mustCall(() => { <ide> setTimeout(() => { <ide> process.exit(0); <ide> if (cluster.isWorker) { <ide> // Make sure that the master died on purpose <ide> assert.strictEqual(code, 0); <ide> <del> // check worker process status <add> // Check worker process status <ide> const pollWorker = () => { <ide> alive = common.isAlive(pid); <ide> if (alive) { <ide><path>test/parallel/test-cluster-worker-exit.js <ide> if (cluster.isWorker) { <ide> }; <ide> } <ide> <del>// some helper functions ... <add>// Some helper functions ... <ide> <ide> function checkResults(expected_results, results) { <ide> for (const k in expected_results) { <ide><path>test/parallel/test-cluster-worker-kill.js <ide> if (cluster.isWorker) { <ide> }); <ide> } <ide> <del>// some helper functions ... <add>// Some helper functions ... 
<ide> <ide> function checkResults(expected_results, results) { <ide> for (const k in expected_results) { <ide><path>test/parallel/test-console.js <ide> console.dir(custom_inspect, { showHidden: false }); <ide> console.dir({ foo: { bar: { baz: true } } }, { depth: 0 }); <ide> console.dir({ foo: { bar: { baz: true } } }, { depth: 1 }); <ide> <del>// test console.dirxml() <add>// Test console.dirxml() <ide> console.dirxml(custom_inspect, custom_inspect); <ide> console.dirxml( <ide> { foo: { bar: { baz: true } } }, <ide> { foo: { bar: { quux: false } } }, <ide> { foo: { bar: { quux: true } } } <ide> ); <ide> <del>// test console.trace() <add>// Test console.trace() <ide> console.trace('This is a %j %d', { formatted: 'trace' }, 10, 'foo'); <ide> <ide> // Test console.time() and console.timeEnd() output <ide><path>test/parallel/test-crypto-cipher-decipher.js <ide> function testCipher1(key) { <ide> <ide> assert.strictEqual(txt, plaintext); <ide> <del> // streaming cipher interface <add> // Streaming cipher interface <ide> // NB: In real life, it's not guaranteed that you can get all of it <ide> // in a single read() like this. But in this case, we know it's <ide> // quite small, so there's no harm. <ide><path>test/parallel/test-crypto-cipheriv-decipheriv.js <ide> function testCipher1(key, iv) { <ide> assert.strictEqual(txt, plaintext, <ide> `encryption/decryption with key ${key} and iv ${iv}`); <ide> <del> // streaming cipher interface <add> // Streaming cipher interface <ide> // NB: In real life, it's not guaranteed that you can get all of it <ide> // in a single read() like this. But in this case, we know it's <ide> // quite small, so there's no harm. <ide><path>test/parallel/test-crypto-dh.js <ide> if (availableCurves.has('prime256v1') && availableCurves.has('secp256k1')) { <ide> assert(firstByte === 2 || firstByte === 3); <ide> firstByte = ecdh1.getPublicKey('buffer', 'hybrid')[0]; <ide> assert(firstByte === 6 || firstByte === 7); <del> // format value should be string <add> // Format value should be string <ide> <ide> common.expectsError( <ide> () => ecdh1.getPublicKey('buffer', 10), <ide><path>test/parallel/test-crypto-keygen.js <ide> const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); <ide> }); <ide> } <ide> <del> // cipher of invalid type. <add> // Cipher of invalid type. <ide> for (const cipher of [0, true, {}]) { <ide> common.expectsError(() => generateKeyPairSync('rsa', { <ide> modulusLength: 4096, <ide><path>test/parallel/test-crypto-padding-aes256.js <ide> function decrypt(val, pad) { <ide> // echo 0123456789abcdef0123456789abcdef \ <ide> // | openssl enc -e -aes256 -nopad -K <key> -iv <iv> \ <ide> // | openssl enc -d -aes256 -nopad -K <key> -iv <iv> <del>let plaintext = '0123456789abcdef0123456789abcdef'; // multiple of block size <add>let plaintext = '0123456789abcdef0123456789abcdef'; // Multiple of block size <ide> let encrypted = encrypt(plaintext, false); <ide> let decrypted = decrypt(encrypted, false); <ide> assert.strictEqual(decrypted, plaintext); <ide><path>test/parallel/test-crypto-scrypt.js <ide> const good = [ <ide> const bad = [ <ide> { N: 1, p: 1, r: 1 }, // N < 2 <ide> { N: 3, p: 1, r: 1 }, // Not power of 2. 
<del> { N: 1, cost: 1 }, // both N and cost <del> { p: 1, parallelization: 1 }, // both p and parallelization <del> { r: 1, blockSize: 1 } // both r and blocksize <add> { N: 1, cost: 1 }, // Both N and cost <add> { p: 1, parallelization: 1 }, // Both p and parallelization <add> { r: 1, blockSize: 1 } // Both r and blocksize <ide> ]; <ide> <ide> // Test vectors where 128*N*r exceeds maxmem. <ide><path>test/parallel/test-debugger-pid.js <ide> const spawn = require('child_process').spawn; <ide> <ide> let buffer = ''; <ide> <del>// connect to debug agent <add>// Connect to debug agent <ide> const interfacer = spawn(process.execPath, ['debug', '-p', '655555']); <ide> <ide> interfacer.stdout.setEncoding('utf-8'); <ide><path>test/parallel/test-dgram-bind.js <ide> socket.on('listening', common.mustCall(() => { <ide> socket.close(); <ide> })); <ide> <del>const result = socket.bind(); // should not throw <add>const result = socket.bind(); // Should not throw. <ide> <del>assert.strictEqual(result, socket); // should have returned itself <add>assert.strictEqual(result, socket); // Should have returned itself. <ide><path>test/parallel/test-dgram-close-in-listening.js <ide> socket.on('listening', function() { <ide> socket.close(); <ide> }); <ide> <del>// get a random port for send <add>// Get a random port for send <ide> const portGetter = dgram.createSocket('udp4') <ide> .bind(0, 'localhost', common.mustCall(() => { <ide> // Adds a listener to 'listening' to send the data when <ide><path>test/parallel/test-dgram-close-is-not-callback.js <ide> const buf = Buffer.alloc(1024, 42); <ide> <ide> const socket = dgram.createSocket('udp4'); <ide> <del>// get a random port for send <add>// Get a random port for send <ide> const portGetter = dgram.createSocket('udp4') <ide> .bind(0, 'localhost', common.mustCall(() => { <ide> socket.send(buf, 0, buf.length, <ide><path>test/parallel/test-dgram-close.js <ide> const buf = Buffer.alloc(1024, 42); <ide> let socket = dgram.createSocket('udp4'); <ide> const { handle } = socket[kStateSymbol]; <ide> <del>// get a random port for send <add>// Get a random port for send <ide> const portGetter = dgram.createSocket('udp4') <ide> .bind(0, 'localhost', common.mustCall(() => { <ide> socket.send(buf, 0, buf.length, <ide><path>test/parallel/test-file-write-stream2.js <ide> for (let i = 0; i < 11; i++) { <ide> const ret = file.write(String(i)); <ide> console.error(`${i} ${ret}`); <ide> <del> // return false when i hits 10 <add> // Return false when i hits 10 <ide> assert.strictEqual(ret, i !== 10); <ide> } <ide> cb_occurred += 'write '; <ide><path>test/parallel/test-fs-error-messages.js <ide> function re(literals, ...values) { <ide> ); <ide> } <ide> <del>// link nonexistent file <add>// Link nonexistent file <ide> { <ide> const validateError = (err) => { <ide> assert.strictEqual(nonexistentFile, err.path); <ide> function re(literals, ...values) { <ide> ); <ide> } <ide> <del>// rename non-empty directory <add>// Rename non-empty directory <ide> { <ide> const validateError = (err) => { <ide> assert.strictEqual(existingDir, err.path); <ide><path>test/parallel/test-fs-mkdir.js <ide> function nextdir() { <ide> const pathname = path.join(tmpdir.path, nextdir(), nextdir()); <ide> <ide> fs.mkdirSync(pathname, { recursive: true }); <del> // should not cause an error. <add> // Should not cause an error. <ide> fs.mkdirSync(pathname, { recursive: true }); <ide> <ide> const exists = fs.existsSync(pathname); <ide> function nextdir() { <ide> })); <ide> } <ide> <del>// mkdirp when path is a file. 
<add>// `mkdirp` when path is a file. <ide> { <ide> const pathname = path.join(tmpdir.path, nextdir(), nextdir()); <ide> <ide><path>test/parallel/test-fs-promises.js <ide> async function getHandle(dest) { <ide> assert.deepStrictEqual((await readFile(dest)).toString(), 'hello'); <ide> } <ide> <del> // invalid change of ownership <add> // Invalid change of ownership <ide> { <ide> const handle = await getHandle(dest); <ide> <ide> async function getHandle(dest) { <ide> }); <ide> } <ide> <del> // set modification times <add> // Set modification times <ide> { <ide> const handle = await getHandle(dest); <ide> <ide> async function getHandle(dest) { <ide> await unlink(newFile); <ide> } <ide> <del> // mkdir when options is number. <add> // `mkdir` when options is number. <ide> { <ide> const dir = path.join(tmpDir, nextdir()); <ide> await mkdir(dir, 777); <ide> const stats = await stat(dir); <ide> assert(stats.isDirectory()); <ide> } <ide> <del> // mkdir when options is string. <add> // `mkdir` when options is string. <ide> { <ide> const dir = path.join(tmpDir, nextdir()); <ide> await mkdir(dir, '777'); <ide> async function getHandle(dest) { <ide> assert(stats.isDirectory()); <ide> } <ide> <del> // mkdirp when path is a file. <add> // `mkdirp` when path is a file. <ide> { <ide> const dir = path.join(tmpDir, nextdir(), nextdir()); <ide> await mkdir(path.dirname(dir)); <ide><path>test/parallel/test-fs-realpath-on-substed-drive.js <ide> const spawnSync = require('child_process').spawnSync; <ide> <ide> let result; <ide> <del>// create a subst drive <add>// Create a subst drive <ide> const driveLetters = 'ABCDEFGHIJKLMNOPQRSTUWXYZ'; <ide> let drive; <ide> let i; <ide><path>test/parallel/test-fs-realpath.js <ide> tmpdir.refresh(); <ide> let root = '/'; <ide> let assertEqualPath = assert.strictEqual; <ide> if (common.isWindows) { <del> // something like "C:\\" <add> // Something like "C:\\" <ide> root = process.cwd().substr(0, 3); <ide> assertEqualPath = function(path_left, path_right, message) { <ide> assert <ide><path>test/parallel/test-http-agent-getname.js <ide> const tmpdir = require('../common/tmpdir'); <ide> <ide> const agent = new http.Agent(); <ide> <del>// default to localhost <add>// Default to localhost <ide> assert.strictEqual( <ide> agent.getName({ <ide> port: 80, <ide><path>test/parallel/test-http-agent-keepalive.js <ide> function remoteError() { <ide> <ide> server.listen(0, common.mustCall(() => { <ide> name = `localhost:${server.address().port}:`; <del> // request first, and keep alive <add> // Request first, and keep alive <ide> get('/first', common.mustCall((res) => { <ide> assert.strictEqual(res.statusCode, 200); <ide> res.on('data', checkDataAndSockets); <ide><path>test/parallel/test-http-client-override-global-agent.js <ide> server.listen(0, common.mustCall(() => { <ide> http.globalAgent = agent; <ide> <ide> makeRequest(); <del> assert(agent.sockets.hasOwnProperty(name)); // agent has indeed been used <add> assert(agent.sockets.hasOwnProperty(name)); // Agent has indeed been used <ide> })); <ide> <ide> function makeRequest() { <ide><path>test/parallel/test-http-client-timeout-agent.js <ide> const server = http.createServer((req, res) => { <ide> const m = /\/(.*)/.exec(req.url); <ide> const reqid = parseInt(m[1], 10); <ide> if (reqid % 2) { <del> // do not reply the request <add> // Do not reply the request <ide> } else { <ide> res.writeHead(200, { 'Content-Type': 'text/plain' }); <ide> res.write(reqid.toString()); <ide><path>test/parallel/test-http-many-ended-pipelines.js <ide> 'use 
strict'; <ide> require('../common'); <ide> <del>// no warnings should happen! <add>// No warnings should happen! <ide> const trace = console.trace; <ide> console.trace = function() { <ide> trace.apply(console, arguments); <ide><path>test/parallel/test-http-outgoing-internal-headers.js <ide> const warn = 'OutgoingMessage.prototype._headers is deprecated'; <ide> common.expectWarning('DeprecationWarning', warn, 'DEP0066'); <ide> <ide> { <del> // tests for _headers get method <add> // Tests for _headers get method <ide> const outgoingMessage = new OutgoingMessage(); <ide> outgoingMessage.getHeaders = common.mustCall(); <ide> outgoingMessage._headers; <ide> } <ide> <ide> { <del> // tests for _headers set method <add> // Tests for _headers set method <ide> const outgoingMessage = new OutgoingMessage(); <ide> outgoingMessage._headers = { <ide> host: 'risingstack.com', <ide><path>test/parallel/test-http-res-write-end-dont-take-array.js <ide> server.once('request', common.mustCall((req, res) => { <ide> server.on('request', common.mustCall((req, res) => { <ide> res.end(Buffer.from('asdf')); <ide> })); <del> // write should accept string <add> // `res.write()` should accept `string`. <ide> res.write('string'); <del> // write should accept buffer <add> // `res.write()` should accept `buffer`. <ide> res.write(Buffer.from('asdf')); <ide> <ide> const expectedError = { <ide> code: 'ERR_INVALID_ARG_TYPE', <ide> name: 'TypeError', <ide> }; <ide> <del> // Write should not accept an Array <add> // `res.write()` should not accept an Array. <ide> assert.throws( <ide> () => { <ide> res.write(['array']); <ide> }, <ide> expectedError <ide> ); <ide> <del> // End should not accept an Array <add> // `res.end()` should not accept an Array. <ide> assert.throws( <ide> () => { <ide> res.end(['moo']); <ide> }, <ide> expectedError <ide> ); <ide> <del> // end should accept string <add> // `res.end()` should accept `string`. <ide> res.end('string'); <ide> })); <ide> <ide> server.listen(0, function() { <del> // Just make a request, other tests handle responses <add> // Just make a request, other tests handle responses. 
<ide> http.get({ port: this.address().port }, (res) => { <ide> res.resume(); <ide> // Do it again to test .end(Buffer); <ide><path>test/parallel/test-http-set-cookies.js <ide> server.on('listening', function() { <ide> }); <ide> }); <ide> <del> // two set-cookie headers <add> // Two set-cookie headers <ide> <ide> http.get({ port: this.address().port, path: '/two' }, function(res) { <ide> assert.deepStrictEqual(res.headers['set-cookie'], ['A', 'B']); <ide><path>test/parallel/test-http-slow-headers-keepalive.js <ide> server.once('timeout', common.mustCall((socket) => { <ide> server.listen(0, () => { <ide> const client = net.connect(server.address().port); <ide> client.write(headers); <del> // finish the first request <add> // Finish the first request <ide> client.write('\r\n'); <ide> // second request <ide> client.write(headers); <ide><path>test/parallel/test-http-url.parse-auth-with-header-in-request.js <ide> function check(request) { <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-url.parse-auth.js <ide> function check(request) { <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-url.parse-basic.js <ide> const url = require('url'); <ide> <ide> let testURL; <ide> <del>// make sure the basics work <add>// Make sure the basics work <ide> function check(request) { <ide> // Default method should still be 'GET' <ide> assert.strictEqual(request.method, 'GET'); <ide> function check(request) { <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-url.parse-https.request.js <ide> const httpsOptions = { <ide> }; <ide> <ide> function check(request) { <del> // assert that I'm https <add> // Assert that I'm https <ide> assert.ok(request.socket._secureEstablished); <ide> } <ide> <ide> const server = https.createServer(httpsOptions, function(request, response) { <del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-url.parse-path.js <ide> const http = require('http'); <ide> const url = require('url'); <ide> <ide> function check(request) { <del> // a path should come over <add> // A path should come over <ide> assert.strictEqual(request.url, '/asdf'); <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-url.parse-post.js <ide> function check(request) { <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-url.parse-search.js <ide> function check(request) { <ide> } <ide> <ide> const server = http.createServer(function(request, response) { 
<del> // run the check function <add> // Run the check function <ide> check(request); <ide> response.writeHead(200, {}); <ide> response.end('ok'); <ide><path>test/parallel/test-http-write-callbacks.js <ide> server.on('checkContinue', (req, res) => { <ide> server.close(); <ide> assert.strictEqual(req.method, 'PUT'); <ide> res.writeContinue(() => { <del> // continue has been written <add> // Continue has been written <ide> req.on('end', () => { <ide> res.write('asdf', (er) => { <ide> assert.ifError(er); <ide><path>test/parallel/test-http2-binding.js <ide> assert.strictEqual(settings.maxFrameSize, 16384); <ide> assert.strictEqual(binding.nghttp2ErrorString(-517), <ide> 'GOAWAY has already been sent'); <ide> <del>// assert constants are present <add>// Assert constants are present <ide> assert(binding.constants); <ide> assert.strictEqual(typeof binding.constants, 'object'); <ide> const constants = binding.constants; <ide><path>test/parallel/test-http2-client-destroy.js <ide> const Countdown = require('../common/countdown'); <ide> })); <ide> } <ide> <del>// test destroy before goaway <add>// Test destroy before goaway <ide> { <ide> const server = h2.createServer(); <ide> server.on('stream', common.mustCall((stream) => { <ide> const Countdown = require('../common/countdown'); <ide> })); <ide> } <ide> <del>// test destroy before connect <add>// Test destroy before connect <ide> { <ide> const server = h2.createServer(); <ide> server.on('stream', common.mustNotCall()); <ide> const Countdown = require('../common/countdown'); <ide> })); <ide> } <ide> <del>// test close before connect <add>// Test close before connect <ide> { <ide> const server = h2.createServer(); <ide> <ide> const Countdown = require('../common/countdown'); <ide> })); <ide> <ide> const req = client.request(); <del> // should throw goaway error <add> // Should throw goaway error <ide> req.on('error', common.expectsError({ <ide> code: 'ERR_HTTP2_GOAWAY_SESSION', <ide> type: Error, <ide><path>test/parallel/test-http2-client-socket-destroy.js <ide> server.on('stream', common.mustCall((stream) => { <ide> stream.on('close', common.mustCall()); <ide> stream.respond(); <ide> stream.write(body); <del> // purposefully do not end() <add> // Purposefully do not end() <ide> })); <ide> <ide> server.listen(0, common.mustCall(function() { <ide> const client = h2.connect(`http://localhost:${this.address().port}`); <ide> const req = client.request(); <ide> <ide> req.on('response', common.mustCall(() => { <del> // send a premature socket close <add> // Send a premature socket close <ide> client[kSocket].destroy(); <ide> })); <ide> <ide><path>test/parallel/test-http2-compat-serverresponse-end.js <ide> const { <ide> const request = client.request(headers); <ide> request.on('response', mustCall((headers, flags) => { <ide> strictEqual(headers[HTTP2_HEADER_STATUS], HTTP_STATUS_OK); <del> strictEqual(flags, 5); // the end of stream flag is set <add> strictEqual(flags, 5); // The end of stream flag is set <ide> strictEqual(headers.foo, 'bar'); <ide> })); <ide> request.on('data', mustNotCall()); <ide> const { <ide> const request = client.request(headers); <ide> request.on('response', mustCall((headers, flags) => { <ide> strictEqual(headers[HTTP2_HEADER_STATUS], HTTP_STATUS_OK); <del> strictEqual(flags, 5); // the end of stream flag is set <add> strictEqual(flags, 5); // The end of stream flag is set <ide> strictEqual(headers.foo, 'bar'); <ide> })); <ide> request.on('data', mustNotCall()); <ide> const { <ide> const request = client.request(headers); <ide> 
request.on('response', mustCall((headers, flags) => { <ide> strictEqual(headers[HTTP2_HEADER_STATUS], HTTP_STATUS_OK); <del> strictEqual(flags, 5); // the end of stream flag is set <add> strictEqual(flags, 5); // The end of stream flag is set <ide> strictEqual(headers.foo, 'bar'); <ide> })); <ide> request.on('data', mustNotCall()); <ide> const { <ide> const request = client.request(headers); <ide> request.on('response', mustCall((headers, flags) => { <ide> strictEqual(headers[HTTP2_HEADER_STATUS], HTTP_STATUS_OK); <del> strictEqual(flags, 5); // the end of stream flag is set <add> strictEqual(flags, 5); // The end of stream flag is set <ide> strictEqual(headers.foo, 'bar'); <ide> })); <ide> request.on('data', mustNotCall()); <ide> const { <ide> const request = client.request(headers); <ide> request.on('response', mustCall((headers, flags) => { <ide> strictEqual(headers[HTTP2_HEADER_STATUS], HTTP_STATUS_OK); <del> strictEqual(flags, 5); // the end of stream flag is set <add> strictEqual(flags, 5); // The end of stream flag is set <ide> })); <ide> request.on('data', mustNotCall()); <ide> request.on('end', mustCall(() => { <ide><path>test/parallel/test-http2-compat-serverresponse-flushheaders.js <ide> server.listen(0, common.mustCall(function() { <ide> const port = server.address().port; <ide> server.once('request', common.mustCall(function(request, response) { <ide> assert.strictEqual(response.headersSent, false); <del> assert.strictEqual(response._header, false); // alias for headersSent <add> assert.strictEqual(response._header, false); // Alias for headersSent <ide> response.flushHeaders(); <ide> assert.strictEqual(response.headersSent, true); <ide> assert.strictEqual(response._header, true); <ide><path>test/parallel/test-http2-compat-write-head-destroyed.js <ide> const http2 = require('http2'); <ide> // Check that writeHead, write and end do not crash in compatibility mode <ide> <ide> const server = http2.createServer(common.mustCall((req, res) => { <del> // destroy the stream first <add> // Destroy the stream first <ide> req.stream.destroy(); <ide> <ide> res.writeHead(200); <ide><path>test/parallel/test-http2-connect.js <ide> const { connect: netConnect } = require('net'); <ide> })); <ide> } <ide> <del>// check for https as protocol <add>// Check for https as protocol <ide> { <ide> const authority = 'https://localhost'; <ide> // A socket error may or may not be reported, keep this as a non-op <ide><path>test/parallel/test-http2-head-request.js <ide> server.listen(0, () => { <ide> <ide> req.on('response', common.mustCall((headers, flags) => { <ide> assert.strictEqual(headers[HTTP2_HEADER_STATUS], 200); <del> assert.strictEqual(flags, 5); // the end of stream flag is set <add> assert.strictEqual(flags, 5); // The end of stream flag is set <ide> })); <ide> req.on('data', common.mustNotCall()); <ide> req.on('end', common.mustCall(() => { <ide><path>test/parallel/test-http2-session-settings.js <ide> server.listen( <ide> ); <ide> }); <ide> <del> // error checks for enablePush <add> // Error checks for enablePush <ide> [1, {}, 'test', [], null, Infinity, NaN].forEach((i) => { <ide> common.expectsError( <ide> () => client.settings({ enablePush: i }), <ide><path>test/parallel/test-http2-session-unref.js <ide> server.listen(0, common.mustCall(() => { <ide> client.unref(); <ide> } <ide> <del> // unref destroyed client <add> // Unref destroyed client <ide> { <ide> const client = http2.connect(`http://localhost:${port}`); <ide> <ide> server.listen(0, common.mustCall(() => { <ide> })); <ide> } <ide> 
<del> // unref destroyed client <add> // Unref destroyed client <ide> { <ide> const client = http2.connect(`http://localhost:${port}`, { <ide> createConnection: common.mustCall(() => clientSide) <ide><path>test/parallel/test-https-agent-create-connection.js <ide> function createServer() { <ide> })); <ide> } <ide> <del>// use port and option connect <add>// Use port and option connect <ide> { <ide> const server = createServer(); <ide> server.listen(0, common.mustCall(() => { <ide> function createServer() { <ide> })); <ide> } <ide> <del>// options is null <add>// `options` is null <ide> { <ide> const server = createServer(); <ide> server.listen(0, common.mustCall(() => { <ide> function createServer() { <ide> })); <ide> } <ide> <del>// options is undefined <add>// `options` is undefined <ide> { <ide> const server = createServer(); <ide> server.listen(0, common.mustCall(() => { <ide><path>test/parallel/test-https-agent-getname.js <ide> assert.strictEqual( <ide> 'localhost:::::::::::::::::::' <ide> ); <ide> <del>// pass all options arguments <add>// Pass all options arguments <ide> const options = { <ide> host: '0.0.0.0', <ide> port: 443, <ide><path>test/parallel/test-https-argument-of-creating.js <ide> const tls = require('tls'); <ide> <ide> const dftProtocol = {}; <ide> <del>// test for immutable `opts` <add>// Test for immutable `opts` <ide> { <ide> const opts = { foo: 'bar', ALPNProtocols: [ 'http/1.1' ] }; <ide> const server = https.createServer(opts); <ide><path>test/parallel/test-https-client-override-global-agent.js <ide> server.listen(0, common.mustCall(() => { <ide> https.globalAgent = agent; <ide> <ide> makeRequest(); <del> assert(agent.sockets.hasOwnProperty(name)); // agent has indeed been used <add> assert(agent.sockets.hasOwnProperty(name)); // Agent has indeed been used <ide> })); <ide> <ide> function makeRequest() { <ide><path>test/parallel/test-https-client-renegotiation-limit.js <ide> const fixtures = require('../common/fixtures'); <ide> // Renegotiation as a protocol feature was dropped after TLS1.2. <ide> tls.DEFAULT_MAX_VERSION = 'TLSv1.2'; <ide> <del>// renegotiation limits to test <add>// Renegotiation limits to test <ide> const LIMITS = [0, 1, 2, 3, 5, 10, 16]; <ide> <ide> { <ide> function test(next) { <ide> <ide> spam(); <ide> <del> // simulate renegotiation attack <add> // Simulate renegotiation attack <ide> function spam() { <ide> client.renegotiate({}, (err) => { <ide> assert.ifError(err); <ide><path>test/parallel/test-https-strict.js <ide> function makeReq(path, port, error, host, ca) { <ide> } <ide> <ide> function allListening() { <del> // ok, ready to start the tests! <add> // Ok, ready to start the tests! <ide> const port1 = server1.address().port; <ide> const port2 = server2.address().port; <ide> const port3 = server3.address().port; <ide><path>test/parallel/test-https-timeout.js <ide> const options = { <ide> cert: fixtures.readKey('agent1-cert.pem') <ide> }; <ide> <del>// a server that never replies <add>// A server that never replies <ide> const server = https.createServer(options, function() { <ide> console.log('Got request. 
Doing nothing.'); <ide> }).listen(0, common.mustCall(function() { <ide><path>test/parallel/test-icu-punycode.js <ide> const { internalBinding } = require('internal/test/binding'); <ide> const icu = internalBinding('icu'); <ide> const assert = require('assert'); <ide> <del>// test hasConverter method <add>// Test hasConverter method <ide> assert(icu.hasConverter('utf-8'), <ide> 'hasConverter should report coverter exists for utf-8'); <ide> assert(!icu.hasConverter('x'), <ide><path>test/parallel/test-intl.js <ide> if (!common.hasIntl) { <ide> const collOpts = { sensitivity: 'base', ignorePunctuation: true }; <ide> const coll = new Intl.Collator(['en'], collOpts); <ide> <del> // ignore punctuation <add> // Ignore punctuation <ide> assert.strictEqual(coll.compare('blackbird', 'black-bird'), 0); <del> // compare less <add> // Compare less <ide> assert.strictEqual(coll.compare('blackbird', 'red-bird'), -1); <del> // compare greater <add> // Compare greater <ide> assert.strictEqual(coll.compare('bluebird', 'blackbird'), 1); <del> // ignore case <add> // Ignore case <ide> assert.strictEqual(coll.compare('Bluebird', 'bluebird'), 0); <del> // ffi ligature (contraction) <add> // `ffi` ligature (contraction) <ide> assert.strictEqual(coll.compare('\ufb03', 'ffi'), 0); <ide> } <ide><path>test/parallel/test-listen-fd-cluster.js <ide> function master() { <ide> <ide> function worker() { <ide> console.error('worker, about to create server and listen on fd=3'); <del> // start a server on fd=3 <add> // Start a server on fd=3 <ide> http.createServer(function(req, res) { <ide> console.error('request on worker'); <ide> console.error('%s %s', req.method, req.url, req.headers); <ide><path>test/parallel/test-listen-fd-detached-inherit.js <ide> function parent() { <ide> <ide> // Run as a child of the parent() mode. <ide> function child() { <del> // start a server on fd=3 <add> // Start a server on fd=3 <ide> http.createServer(function(req, res) { <ide> console.error('request on child'); <ide> console.error('%s %s', req.method, req.url, req.headers); <ide><path>test/parallel/test-listen-fd-detached.js <ide> function parent() { <ide> } <ide> <ide> function child() { <del> // start a server on fd=3 <add> // Start a server on fd=3 <ide> http.createServer(function(req, res) { <ide> console.error('request on child'); <ide> console.error('%s %s', req.method, req.url, req.headers); <ide><path>test/parallel/test-listen-fd-server.js <ide> function child() { <ide> process.exit(0); <ide> }); <ide> <del> // start a server on fd=3 <add> // Start a server on fd=3 <ide> http.createServer(function(req, res) { <ide> console.error('request on child'); <ide> console.error('%s %s', req.method, req.url, req.headers); <ide><path>test/parallel/test-microtask-queue-run-immediate.js <ide> process.on('exit', function() { <ide> assert.strictEqual(done, 2); <ide> }); <ide> <del>// no nextTick, microtask <add>// No nextTick, microtask <ide> setImmediate(function() { <ide> enqueueMicrotask(function() { <ide> done++; <ide><path>test/parallel/test-microtask-queue-run.js <ide> process.on('exit', function() { <ide> assert.strictEqual(done, 2); <ide> }); <ide> <del>// no nextTick, microtask <add>// No nextTick, microtask <ide> setTimeout(function() { <ide> enqueueMicrotask(function() { <ide> done++; <ide><path>test/parallel/test-net-can-reset-timeout.js <ide> const server = net.createServer(common.mustCall(function(stream) { <ide> <ide> stream.once('timeout', common.mustCall(function() { <ide> console.log('timeout'); <del> // try to reset the timeout. 
<add> // Try to reset the timeout. <ide> stream.write('WHAT.'); <ide> })); <ide> <ide><path>test/parallel/test-net-connect-memleak.js <ide> const gcListener = { ongc() { collected = true; } }; <ide> const sock = net.createConnection( <ide> server.address().port, <ide> common.mustCall(() => { <del> assert.strictEqual(gcObject, gcObject); // keep reference alive <add> assert.strictEqual(gcObject, gcObject); // Keep reference alive <ide> assert.strictEqual(collected, false); <ide> setImmediate(done, sock); <ide> })); <ide><path>test/parallel/test-net-pingpong.js <ide> function pingPongTest(port, host) { <ide> <ide> socket.on('end', common.mustCall(function() { <ide> assert.strictEqual(socket.allowHalfOpen, true); <del> assert.strictEqual(socket.writable, true); // because allowHalfOpen <add> assert.strictEqual(socket.writable, true); // Because allowHalfOpen <ide> assert.strictEqual(socket.readable, false); <ide> socket.end(); <ide> })); <ide><path>test/parallel/test-net-server-listen-handle.js <ide> function randomHandle(type) { <ide> assert.fail(`unable to bind ${handleName}: ${getSystemErrorName(errno)}`); <ide> } <ide> <del> if (!common.isWindows) { // fd doesn't work on windows <add> if (!common.isWindows) { // `fd` doesn't work on Windows. <ide> // err >= 0 but fd = -1, should not happen <ide> assert.notStrictEqual(handle.fd, -1, <ide> `Bound ${handleName} has fd -1 and errno ${errno}`); <ide> function randomPipes(number) { <ide> <ide> // Not a public API, used by child_process <ide> if (!common.isWindows) { // Windows doesn't support {fd: <n>} <del> const handles = randomPipes(2); // generate pipes in advance <add> const handles = randomPipes(2); // Generate pipes in advance <ide> // Test listen(pipe) <ide> net.createServer() <ide> .listen(handles[0]) <ide> if (!common.isWindows) { // Windows doesn't support {fd: <n>} <ide> } <ide> <ide> if (!common.isWindows) { // Windows doesn't support {fd: <n>} <del> const handles = randomPipes(6); // generate pipes in advance <add> const handles = randomPipes(6); // Generate pipes in advance <ide> // Test listen({handle: pipe}, cb) <ide> net.createServer() <ide> .listen({ handle: handles[0] }, closePipeServer(handles[0])); <ide><path>test/parallel/test-net-server-unref-persistent.js <ide> const common = require('../common'); <ide> const net = require('net'); <ide> const server = net.createServer(); <ide> <del>// unref before listening <add>// Unref before listening <ide> server.unref(); <ide> server.listen(); <ide> <ide><path>test/parallel/test-net-socket-local-address.js <ide> 'use strict'; <ide> const common = require('../common'); <del>// skip test in FreeBSD jails <add>// Skip test in FreeBSD jails <ide> if (common.inFreeBSDJail) <ide> common.skip('In a FreeBSD jail'); <ide> <ide><path>test/parallel/test-path-relative.js <ide> const relativeTests = [ <ide> ] <ide> ], <ide> [ path.posix.relative, <del> // arguments result <add> // Arguments result <ide> [['/var/lib', '/var', '..'], <ide> ['/var/lib', '/bin', '../../bin'], <ide> ['/var/lib', '/var/lib', ''], <ide><path>test/parallel/test-process-kill-pid.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> <del>// test variants of pid <add>// Test variants of pid <ide> // <ide> // null: TypeError <ide> // undefined: TypeError <ide><path>test/parallel/test-process-raw-debug.js <ide> function parent() { <ide> } <ide> <ide> function child() { <del> // even when all hope is lost... <add> // Even when all hope is lost... 
<ide> <ide> process.nextTick = function() { <ide> throw new Error('No ticking!'); <ide><path>test/parallel/test-promises-unhandled-proxy-rejections.js <ide> common.expectWarning({ <ide> UnhandledPromiseRejectionWarning: expectedPromiseWarning, <ide> }); <ide> <del>// ensure this doesn't crash <add>// Ensure this doesn't crash <ide> Promise.reject(thorny); <ide><path>test/parallel/test-promises-unhandled-symbol-rejections.js <ide> common.expectWarning({ <ide> ], <ide> }); <ide> <del>// ensure this doesn't crash <add>// Ensure this doesn't crash <ide> Promise.reject(Symbol()); <ide><path>test/parallel/test-querystring-escape.js <ide> common.expectsError( <ide> } <ide> ); <ide> <del>// using toString for objects <add>// Using toString for objects <ide> assert.strictEqual( <ide> qs.escape({ test: 5, toString: () => 'test', valueOf: () => 10 }), <ide> 'test' <ide><path>test/parallel/test-querystring.js <ide> qsNoMungeTestCases.forEach((testCase) => { <ide> assert.deepStrictEqual(qs.stringify(testCase[1], '&', '='), testCase[0]); <ide> }); <ide> <del>// test the nested qs-in-qs case <add>// Test the nested qs-in-qs case <ide> { <ide> const f = qs.parse('a=b&q=x%3Dy%26y%3Dz'); <ide> check(f, createWithNoPrototype([ <ide> qsNoMungeTestCases.forEach((testCase) => { <ide> check(f.q, expectedInternal); <ide> } <ide> <del>// now test stringifying <add>// Now test stringifying <ide> <ide> // basic <ide> qsTestCases.forEach((testCase) => { <ide> common.expectsError( <ide> } <ide> ); <ide> <del>// coerce numbers to string <add>// Coerce numbers to string <ide> assert.strictEqual(qs.stringify({ foo: 0 }), 'foo=0'); <ide> assert.strictEqual(qs.stringify({ foo: -0 }), 'foo=0'); <ide> assert.strictEqual(qs.stringify({ foo: 3 }), 'foo=3'); <ide> qsUnescapeTestCases.forEach((testCase) => { <ide> assert.strictEqual(qs.unescapeBuffer(testCase[0]).toString(), testCase[1]); <ide> }); <ide> <del>// test overriding .unescape <add>// Test overriding .unescape <ide> { <ide> const prevUnescape = qs.unescape; <ide> qs.unescape = (str) => { <ide><path>test/parallel/test-readline-interface.js <ide> function isWarned(emitter) { <ide> } <ide> <ide> { <del> // set crlfDelay to 5000ms <add> // Set crlfDelay to 5000ms <ide> const fi = new FakeInput(); <ide> const rli = new readline.Interface({ <ide> input: fi, <ide> function isWarned(emitter) { <ide> rli.close(); <ide> } <ide> <del> // default history size 30 <add> // Default history size 30 <ide> { <ide> const fi = new FakeInput(); <ide> const rli = new readline.Interface( <ide> function isWarned(emitter) { <ide> assert.ok(called); <ide> } <ide> <del> // sending a blank line <add> // Sending a blank line <ide> { <ide> const fi = new FakeInput(); <ide> const rli = new readline.Interface( <ide> function isWarned(emitter) { <ide> rli.close(); <ide> } <ide> <del> // calling the question callback <add> // Calling the question callback <ide> { <ide> let called = false; <ide> const fi = new FakeInput(); <ide> function isWarned(emitter) { <ide> rli.close(); <ide> } <ide> <del> // sending a multi-line question <add> // Sending a multi-line question <ide> { <ide> const fi = new FakeInput(); <ide> const rli = new readline.Interface( <ide><path>test/parallel/test-readline-keys.js <ide> const addKeyIntervalTest = (sequences, expectedKeys, interval = 550, <ide> return fn; <ide> }; <ide> <del>// regular alphanumerics <add>// Regular alphanumerics <ide> addTest('io.JS', [ <ide> { name: 'i', sequence: 'i' }, <ide> { name: 'o', sequence: 'o' }, <ide> addTest('io.JS', [ <ide> { name: 's', 
sequence: 'S', shift: true }, <ide> ]); <ide> <del>// named characters <add>// Named characters <ide> addTest('\n\r\t', [ <ide> { name: 'enter', sequence: '\n' }, <ide> { name: 'return', sequence: '\r' }, <ide> { name: 'tab', sequence: '\t' }, <ide> ]); <ide> <del>// space and backspace <add>// Space and backspace <ide> addTest('\b\x7f\x1b\b\x1b\x7f\x1b\x1b \x1b ', [ <ide> { name: 'backspace', sequence: '\b' }, <ide> { name: 'backspace', sequence: '\x7f' }, <ide> addTest('\b\x7f\x1b\b\x1b\x7f\x1b\x1b \x1b ', [ <ide> { name: 'space', sequence: '\x1b ', meta: true }, <ide> ]); <ide> <del>// escape key <add>// Escape key <ide> addTest('\x1b\x1b\x1b', [ <ide> { name: 'escape', sequence: '\x1b\x1b\x1b', meta: true }, <ide> ]); <ide> <del>// control keys <add>// Control keys <ide> addTest('\x01\x0b\x10', [ <ide> { name: 'a', sequence: '\x01', ctrl: true }, <ide> { name: 'k', sequence: '\x0b', ctrl: true }, <ide> { name: 'p', sequence: '\x10', ctrl: true }, <ide> ]); <ide> <del>// alt keys <add>// Alt keys <ide> addTest('a\x1baA\x1bA', [ <ide> { name: 'a', sequence: 'a' }, <ide> { name: 'a', sequence: '\x1ba', meta: true }, <ide> addTest('\x1b[11~\x1b[12~\x1b[13~\x1b[14~', [ <ide> { name: 'f4', sequence: '\x1b[14~', code: '[14~' }, <ide> ]); <ide> <del>// from Cygwin and used in libuv <add>// From Cygwin and used in libuv <ide> addTest('\x1b[[A\x1b[[B\x1b[[C\x1b[[D\x1b[[E', [ <ide> { name: 'f1', sequence: '\x1b[[A', code: '[[A' }, <ide> { name: 'f2', sequence: '\x1b[[B', code: '[[B' }, <ide> addTest('\x1b[[A\x1b[[B\x1b[[C\x1b[[D\x1b[[E', [ <ide> { name: 'f5', sequence: '\x1b[[E', code: '[[E' }, <ide> ]); <ide> <del>// common <add>// Common <ide> addTest('\x1b[15~\x1b[17~\x1b[18~\x1b[19~\x1b[20~\x1b[21~\x1b[23~\x1b[24~', [ <ide> { name: 'f5', sequence: '\x1b[15~', code: '[15~' }, <ide> { name: 'f6', sequence: '\x1b[17~', code: '[17~' }, <ide> addTest('\x1bOA\x1bOB\x1bOC\x1bOD\x1bOE\x1bOF\x1bOH', [ <ide> { name: 'home', sequence: '\x1bOH', code: 'OH' }, <ide> ]); <ide> <del>// old xterm shift-arrows <add>// Old xterm shift-arrows <ide> addTest('\x1bO2A\x1bO2B', [ <ide> { name: 'up', sequence: '\x1bO2A', code: 'OA', shift: true }, <ide> { name: 'down', sequence: '\x1bO2B', code: 'OB', shift: true }, <ide> addTest('\x1b[A\x1b[B\x1b[2A\x1b[2B', [ <ide> { name: 'down', sequence: '\x1b[2B', code: '[B', shift: true }, <ide> ]); <ide> <del>// rxvt keys with modifiers <add>// `rxvt` keys with modifiers. <ide> // eslint-disable-next-line max-len <ide> addTest('\x1b[20~\x1b[2$\x1b[2^\x1b[3$\x1b[3^\x1b[5$\x1b[5^\x1b[6$\x1b[6^\x1b[7$\x1b[7^\x1b[8$\x1b[8^', [ <ide> { name: 'f9', sequence: '\x1b[20~', code: '[20~' }, <ide> addTest('\x1b[20~\x1b[2$\x1b[2^\x1b[3$\x1b[3^\x1b[5$\x1b[5^\x1b[6$\x1b[6^\x1b[7$ <ide> { name: 'end', sequence: '\x1b[8^', code: '[8^', ctrl: true }, <ide> ]); <ide> <del>// misc <add>// Misc <ide> addTest('\x1b[Z', [ <ide> { name: 'tab', sequence: '\x1b[Z', code: '[Z', shift: true }, <ide> ]); <ide> addTest('\x1b[DD\x1b[2DD\x1b[2^D', [ <ide> { name: 'd', sequence: 'D', shift: true }, <ide> ]); <ide> <del>// color sequences <add>// Color sequences <ide> addTest('\x1b[31ma\x1b[39ma', [ <ide> { name: 'undefined', sequence: '\x1b[31m', code: '[31m' }, <ide> { name: 'a', sequence: 'a' }, <ide> { name: 'undefined', sequence: '\x1b[39m', code: '[39m' }, <ide> { name: 'a', sequence: 'a' }, <ide> ]); <ide> <del>// rxvt keys with modifiers <add>// `rxvt` keys with modifiers. 
<ide> addTest('\x1b[a\x1b[b\x1b[c\x1b[d\x1b[e', [ <ide> { name: 'up', sequence: '\x1b[a', code: '[a', shift: true }, <ide> { name: 'down', sequence: '\x1b[b', code: '[b', shift: true }, <ide> addTest('\x1bOa\x1bOb\x1bOc\x1bOd\x1bOe', [ <ide> ]); <ide> <ide> // Reduce array of addKeyIntervalTest(..) right to left <del>// with () => {} as initial function <add>// with () => {} as initial function. <ide> const runKeyIntervalTests = [ <del> // escape character <add> // Escape character <ide> addKeyIntervalTest('\x1b', [ <ide> { name: 'escape', sequence: '\x1b', meta: true } <ide> ]), <del> // chain of escape characters <add> // Chain of escape characters. <ide> addKeyIntervalTest('\x1b\x1b\x1b\x1b'.split(''), [ <ide> { name: 'escape', sequence: '\x1b', meta: true }, <ide> { name: 'escape', sequence: '\x1b', meta: true }, <ide> const runKeyIntervalTests = [ <ide> ]) <ide> ].reverse().reduce((acc, fn) => fn(acc), () => {}); <ide> <del>// Run key interval tests one after another <add>// Run key interval tests one after another. <ide> runKeyIntervalTests(); <ide><path>test/parallel/test-repl-autolibs.js <ide> function test2() { <ide> putIn.write = function(data) { <ide> gotWrite = true; <ide> if (data.length) { <del> // repl response error message <add> // REPL response error message <ide> assert.strictEqual(data, '{}\n'); <ide> // Original value wasn't overwritten <ide> assert.strictEqual(val, global.url); <ide><path>test/parallel/test-repl-context.js <ide> const stream = new ArrayStream(); <ide> assert.strictEqual(server.lines[0], '_ = 500;'); <ide> assert.strictEqual(server.last, 500); <ide> <del> // reset the server context <add> // Reset the server context <ide> server.resetContext(); <ide> assert.ok(!server.underscoreAssigned); <ide> assert.strictEqual(server.lines.length, 0); <ide><path>test/parallel/test-repl-end-emits-exit.js <ide> function testTerminalMode() { <ide> }); <ide> <ide> process.nextTick(function() { <del> // manually fire a ^D keypress <add> // Manually fire a ^D keypress <ide> stream.emit('data', '\u0004'); <ide> }); <ide> <ide><path>test/parallel/test-repl-options.js <ide> assert.strictEqual(r1.ignoreUndefined, false); <ide> assert.strictEqual(r1.replMode, repl.REPL_MODE_SLOPPY); <ide> assert.strictEqual(r1.historySize, 30); <ide> <del>// test r1 for backwards compact <add>// Test r1 for backwards compact <ide> assert.strictEqual(r1.rli.input, stream); <ide> assert.strictEqual(r1.rli.output, stream); <ide> assert.strictEqual(r1.rli.input, r1.inputStream); <ide> assert.strictEqual(r2.writer, writer); <ide> assert.strictEqual(r2.replMode, repl.REPL_MODE_STRICT); <ide> assert.strictEqual(r2.historySize, 50); <ide> <del>// test r2 for backwards compact <add>// Test r2 for backwards compact <ide> assert.strictEqual(r2.rli.input, stream); <ide> assert.strictEqual(r2.rli.output, stream); <ide> assert.strictEqual(r2.rli.input, r2.inputStream); <ide><path>test/parallel/test-repl-save-load.js <ide> let loadFile = join(tmpdir.path, 'file.does.not.exist'); <ide> putIn.write = function(data) { <ide> // Make sure I get a failed to load message and not some crazy error <ide> assert.strictEqual(data, `Failed to load:${loadFile}\n`); <del> // eat me to avoid work <add> // Eat me to avoid work <ide> putIn.write = () => {}; <ide> }; <ide> putIn.run([`.load ${loadFile}`]); <ide><path>test/parallel/test-repl-tab-complete.js <ide> testCustomCompleterAsyncMode.complete('a', common.mustCall((error, data) => { <ide> ]); <ide> })); <ide> <del>// tab completion in editor mode <add>// Tab completion 
in editor mode <ide> const editorStream = new ArrayStream(); <ide> const editor = repl.start({ <ide> stream: editorStream, <ide><path>test/parallel/test-repl.js <ide> const prompt_tcp = 'node via TCP socket> '; <ide> // Absolute path to test/fixtures/a.js <ide> const moduleFilename = fixtures.path('a'); <ide> <del>// function for REPL to run <add>// Function for REPL to run <ide> global.invoke_me = function(arg) { <ide> return `invoked ${arg}`; <ide> }; <ide> const errorTests = [ <ide> send: ')', <ide> expect: 'undefined' <ide> }, <del> // npm prompt error message <add> // `npm` prompt error message. <ide> { <ide> send: 'npm install foobar', <ide> expect: [ <ide> const errorTests = [ <ide> /^SyntaxError: / <ide> ] <ide> }, <del> // bring back the repl to prompt <add> // Bring back the repl to prompt <ide> { <ide> send: '.break', <ide> expect: '' <ide><path>test/parallel/test-require-extension-over-directory.js <ide> 'use strict'; <del>// fixes regression from v4 <add>// Fixes regression from v4 <ide> require('../common'); <ide> const assert = require('assert'); <ide> const fixtures = require('../common/fixtures'); <ide><path>test/parallel/test-require-symlink.js <ide> function test() { <ide> fs.symlinkSync(linkTarget, linkDir, 'dir'); <ide> fs.symlinkSync(linkScriptTarget, linkScript); <ide> <del> // load symlinked-module <add> // Load symlinked-module <ide> const fooModule = require(path.join(tmpDirTarget, 'foo.js')); <ide> assert.strictEqual(fooModule.dep1.bar.version, 'CORRECT_VERSION'); <ide> assert.strictEqual(fooModule.dep2.bar.version, 'CORRECT_VERSION'); <ide> <del> // load symlinked-script as main <add> // Load symlinked-script as main <ide> const node = process.execPath; <ide> const child = spawn(node, ['--preserve-symlinks', linkScript]); <ide> child.on('close', function(code, signal) { <ide><path>test/parallel/test-stream-big-push.js <ide> r._read = common.mustCall(_read, 3); <ide> <ide> r.on('end', common.mustCall()); <ide> <del>// push some data in to start. <del>// we've never gotten any read event at this point. <add>// Push some data in to start. <add>// We've never gotten any read event at this point. <ide> const ret = r.push(str); <del>// should be false. > hwm <add>// Should be false. 
> hwm <ide> assert(!ret); <ide> let chunk = r.read(); <ide> assert.strictEqual(chunk, str); <ide><path>test/parallel/test-stream-finished.js <ide> const { promisify } = require('util'); <ide> })); <ide> <ide> rs.push(null); <del> rs.emit('close'); // should not trigger an error <add> rs.emit('close'); // Should not trigger an error <ide> rs.resume(); <ide> } <ide> <ide> const { promisify } = require('util'); <ide> assert(err, 'premature close error'); <ide> })); <ide> <del> rs.emit('close'); // should trigger error <add> rs.emit('close'); // Should trigger error <ide> rs.push(null); <ide> rs.resume(); <ide> } <ide><path>test/parallel/test-stream-readable-destroy.js <ide> const assert = require('assert'); <ide> } <ide> <ide> { <del> // destroy and destroy callback <add> // Destroy and destroy callback <ide> const read = new Readable({ <ide> read() {} <ide> }); <ide><path>test/parallel/test-stream-readable-event.js <ide> const Readable = require('stream').Readable; <ide> // #20923 <ide> const r = new Readable(); <ide> r._read = function() { <del> // actually doing thing here <add> // Actually doing thing here <ide> }; <ide> r.on('data', function() {}); <ide> <ide><path>test/parallel/test-stream-readable-pause-and-resume.js <ide> function readAndPause() { <ide> readAndPause(); <ide> rs.resume(); <ide> }); <del> }, 1); // only call ondata once <add> }, 1); // Only call ondata once <ide> <ide> rs.on('data', ondata); <ide> } <ide><path>test/parallel/test-stream-readable-reading-readingMore.js <ide> const Readable = require('stream').Readable; <ide> // If the stream has ended, we shouldn't be reading <ide> assert.strictEqual(state.ended, !state.reading); <ide> <del> // consume all the data <add> // Consume all the data <ide> while (readable.read() !== null) {} <ide> <del> if (expectedReadingMore.length === 0) // reached end of stream <add> if (expectedReadingMore.length === 0) // Reached end of stream <ide> process.nextTick(common.mustCall(onStreamEnd, 1)); <ide> }, 3)); <ide> <ide> const Readable = require('stream').Readable; <ide> readable.on('end', common.mustCall(onStreamEnd)); <ide> readable.push('pushed'); <ide> <del> // stop emitting 'data' events <add> // Stop emitting 'data' events <ide> assert.strictEqual(state.flowing, true); <ide> readable.pause(); <ide> <ide> const Readable = require('stream').Readable; <ide> process.nextTick(function() { <ide> readable.resume(); <ide> <del> // stop emitting 'data' events <add> // Stop emitting 'data' events <ide> assert.strictEqual(state.flowing, true); <ide> readable.pause(); <ide> <ide><path>test/parallel/test-stream-transform-split-highwatermark.js <ide> function testTransform(expectedReadableHwm, expectedWritableHwm, options) { <ide> assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm); <ide> } <ide> <del>// test overriding defaultHwm <add>// Test overriding defaultHwm <ide> testTransform(666, DEFAULT, { readableHighWaterMark: 666 }); <ide> testTransform(DEFAULT, 777, { writableHighWaterMark: 777 }); <ide> testTransform(666, 777, { <ide> testTransform(666, 777, { <ide> testTransform(0, DEFAULT, { readableHighWaterMark: 0 }); <ide> testTransform(DEFAULT, 0, { writableHighWaterMark: 0 }); <ide> <del>// test highWaterMark overriding <add>// Test highWaterMark overriding <ide> testTransform(555, 555, { <ide> highWaterMark: 555, <ide> readableHighWaterMark: 666, <ide> testTransform(0, 0, { <ide> writableHighWaterMark: 777, <ide> }); <ide> <del>// test undefined, null <add>// Test undefined, null <ide> [undefined, null].forEach((v) 
=> { <ide> testTransform(DEFAULT, DEFAULT, { readableHighWaterMark: v }); <ide> testTransform(DEFAULT, DEFAULT, { writableHighWaterMark: v }); <ide><path>test/parallel/test-stream-unshift-read-race.js <ide> let pushedNull = false; <ide> r._read = function(n) { <ide> assert(!pushedNull, '_read after null push'); <ide> <del> // every third chunk is fast <add> // Every third chunk is fast <ide> push(!(chunks % 3)); <ide> <ide> function push(fast) { <ide><path>test/parallel/test-stream-writable-destroy.js <ide> const assert = require('assert'); <ide> } <ide> <ide> { <del> // destroy and destroy callback <add> // Destroy and destroy callback <ide> const write = new Writable({ <ide> write(chunk, enc, cb) { cb(); } <ide> }); <ide><path>test/parallel/test-stream-writableState-ending.js <ide> const result = writable.end('testing function end()', () => { <ide> // End returns the writable instance <ide> assert.strictEqual(result, writable); <ide> <del>// ending, ended = true. <add>// Ending, ended = true. <ide> // finished = false. <ide> testStates(true, false, true); <ide><path>test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js <ide> assert.strictEqual(writable._writableState.bufferedRequestCount, 0); <ide> writable.cork(); <ide> assert.strictEqual(writable._writableState.corked, 2); <ide> <del>// the first chunk is buffered <add>// The first chunk is buffered <ide> writable.write('first chunk'); <ide> assert.strictEqual(writable._writableState.bufferedRequestCount, 1); <ide> <del>// first uncork does nothing <add>// First uncork does nothing <ide> writable.uncork(); <ide> assert.strictEqual(writable._writableState.corked, 1); <ide> assert.strictEqual(writable._writableState.bufferedRequestCount, 1); <ide><path>test/parallel/test-stream2-push.js <ide> writer._write = function(chunk, encoding, cb) { <ide> writer.on('finish', finish); <ide> <ide> <del>// now emit some chunks. <add>// Now emit some chunks. <ide> <ide> const chunk = 'asdfg'; <ide> <ide><path>test/parallel/test-stream2-readable-from-list.js <ide> function bufferListFromArray(arr) { <ide> ret = fromList(2, { buffer: list, length: 8 }); <ide> assert.strictEqual(ret.toString(), 'ba'); <ide> <del> // read more than we have. <add> // Read more than we have. <ide> ret = fromList(100, { buffer: list, length: 6 }); <ide> assert.strictEqual(ret.toString(), 'zykuel'); <ide> <ide> function bufferListFromArray(arr) { <ide> ret = fromList(2, { buffer: list, length: 8, decoder: true }); <ide> assert.strictEqual(ret, 'ba'); <ide> <del> // read more than we have. <add> // Read more than we have. <ide> ret = fromList(100, { buffer: list, length: 6, decoder: true }); <ide> assert.strictEqual(ret, 'zykuel'); <ide> <ide><path>test/parallel/test-stream2-readable-non-empty-end.js <ide> test.on('readable', function() { <ide> test.read(0); <ide> <ide> function next() { <del> // now let's make 'end' happen <add> // Now let's make 'end' happen <ide> test.removeListener('end', thrower); <ide> test.on('end', common.mustCall()); <ide> <del> // one to get the last byte <add> // One to get the last byte <ide> let r = test.read(); <ide> assert(r); <ide> assert.strictEqual(r.length, 1); <ide><path>test/parallel/test-stream2-transform.js <ide> const Transform = require('_stream_transform'); <ide> // Verify asymmetric transform (expand) <ide> const pt = new Transform(); <ide> <del> // emit each chunk 2 times. <add> // Emit each chunk 2 times. 
<ide> pt._transform = function(chunk, encoding, cb) { <ide> setTimeout(function() { <ide> pt.push(chunk); <ide> const Transform = require('_stream_transform'); <ide> }; <ide> <ide> pt._flush = function(cb) { <del> // just output whatever we have. <add> // Just output whatever we have. <ide> pt.push(Buffer.from(this.state)); <ide> this.state = ''; <ide> cb(); <ide><path>test/parallel/test-stream2-writable.js <ide> for (let i = 0; i < chunks.length; i++) { <ide> }); <ide> <ide> tw.on('finish', common.mustCall(function() { <del> // got chunks in the right order <add> // Got chunks in the right order <ide> assert.deepStrictEqual(tw.buffer, chunks); <ide> })); <ide> <ide> for (let i = 0; i < chunks.length; i++) { <ide> let drains = 0; <ide> <ide> tw.on('finish', common.mustCall(function() { <del> // got chunks in the right order <add> // Got chunks in the right order <ide> assert.deepStrictEqual(tw.buffer, chunks); <ide> assert.strictEqual(drains, 17); <ide> })); <ide> for (let i = 0; i < chunks.length; i++) { <ide> undefined ]; <ide> <ide> tw.on('finish', function() { <del> // got the expected chunks <add> // Got the expected chunks <ide> assert.deepStrictEqual(tw.buffer, chunks); <ide> }); <ide> <ide> for (let i = 0; i < chunks.length; i++) { <ide> undefined ]; <ide> <ide> tw.on('finish', function() { <del> // got the expected chunks <add> // Got the expected chunks <ide> assert.deepStrictEqual(tw.buffer, chunks); <ide> }); <ide> <ide> for (let i = 0; i < chunks.length; i++) { <ide> <ide> tw.on('finish', common.mustCall(function() { <ide> process.nextTick(common.mustCall(function() { <del> // got chunks in the right order <add> // Got chunks in the right order <ide> assert.deepStrictEqual(tw.buffer, chunks); <del> // called all callbacks <add> // Called all callbacks <ide> assert.deepStrictEqual(callbacks._called, chunks); <ide> })); <ide> })); <ide><path>test/parallel/test-stream3-pause-then-read.js <ide> function push() { <ide> <ide> read100(); <ide> <del>// first we read 100 bytes <add>// First we read 100 bytes. <ide> function read100() { <ide> readn(100, onData); <ide> } <ide> function readn(n, then) { <ide> })(); <ide> } <ide> <del>// Then we listen to some data events <add>// Then we listen to some data events. <ide> function onData() { <ide> expectEndingData -= 100; <ide> console.error('onData'); <ide> let seen = 0; <ide> r.on('data', function od(c) { <ide> seen += c.length; <ide> if (seen >= 100) { <del> // seen enough <add> // Seen enough <ide> r.removeListener('data', od); <ide> r.pause(); <ide> if (seen > 100) { <del> // oh no, seen too much! <del> // put the extra back. <add> // Oh no, seen too much! <add> // Put the extra back. <ide> const diff = seen - 100; <ide> r.unshift(c.slice(c.length - diff)); <ide> console.error('seen too much', seen, diff); <ide> } <ide> <del> // Nothing should be lost in between <add> // Nothing should be lost in-between. <ide> setImmediate(pipeLittle); <ide> } <ide> }); <ide> } <ide> <del>// Just pipe 200 bytes, then unshift the extra and unpipe <add>// Just pipe 200 bytes, then unshift the extra and unpipe. <ide> function pipeLittle() { <ide> expectEndingData -= 200; <ide> console.error('pipe a little'); <ide> function pipeLittle() { <ide> r.pipe(w); <ide> } <ide> <del>// now read 1234 more bytes <add>// Now read 1234 more bytes. 
<ide> function read1234() { <ide> readn(1234, resumePause); <ide> } <ide> <ide> function resumePause() { <ide> console.error('resumePause'); <del> // Don't read anything, just resume and re-pause a whole bunch <add> // Don't read anything, just resume and re-pause a whole bunch. <ide> r.resume(); <ide> r.pause(); <ide> r.resume(); <ide><path>test/parallel/test-string-decoder-end.js <ide> function testEncoding(encoding) { <ide> } <ide> <ide> function testBuf(encoding, buf) { <del> // write one byte at a time. <add> // Write one byte at a time. <ide> let s = new SD(encoding); <ide> let res1 = ''; <ide> for (let i = 0; i < buf.length; i++) { <ide><path>test/parallel/test-stringbytes-external.js <ide> let ucs2_control = 'a\u0000'; <ide> let write_str = 'a'; <ide> <ide> <del>// first do basic checks <add>// First do basic checks <ide> let b = Buffer.from(write_str, 'ucs2'); <ide> // first check latin1 <ide> let c = b.toString('latin1'); <ide> assert.strictEqual(b[0], 0x61); <ide> assert.strictEqual(b[1], 0); <ide> assert.strictEqual(ucs2_control, c); <ide> <del>// now create big strings <add>// Now create big strings <ide> const size = 1 << 20; <ide> write_str = write_str.repeat(size); <ide> ucs2_control = ucs2_control.repeat(size); <ide> assert.strictEqual(c_bin.length, c_ucs.length); <ide> for (let i = 0; i < c_bin.length; i++) { <ide> assert.strictEqual(c_bin[i], c_ucs[i]); <ide> } <del>// check resultant strings <add>// Check resultant strings <ide> assert.strictEqual(c_bin.toString('ucs2'), c_ucs.toString('ucs2')); <ide> assert.strictEqual(c_bin.toString('latin1'), ucs2_control); <ide> assert.strictEqual(c_ucs.toString('latin1'), ucs2_control); <ide> const PRE_3OF4_APEX = Math.ceil((EXTERN_APEX / 4) * 3) - RADIOS; <ide> const pumped_string2 = slice2.toString('hex'); <ide> const decoded = Buffer.from(pumped_string, 'hex'); <ide> <del> // the string are the same? <add> // The string are the same? <ide> for (let k = 0; k < pumped_string.length; ++k) { <ide> assert.strictEqual(pumped_string[k], pumped_string2[k]); <ide> } <ide> const PRE_3OF4_APEX = Math.ceil((EXTERN_APEX / 4) * 3) - RADIOS; <ide> const pumped_string2 = slice2.toString('base64'); <ide> const decoded = Buffer.from(pumped_string, 'base64'); <ide> <del> // the string are the same? <add> // The string are the same? <ide> for (let k = 0; k < pumped_string.length - 3; ++k) { <ide> assert.strictEqual(pumped_string[k], pumped_string2[k]); <ide> } <ide><path>test/parallel/test-timers-ordering.js <ide> function f(i) { <ide> `current ts ${now} < prev ts ${last_ts} + 1`); <ide> last_ts = now; <ide> <del> // schedule next iteration <add> // Schedule next iteration <ide> setTimeout(f, 1, i + 1); <ide> } <ide> } <ide><path>test/parallel/test-timers-uncaught-exception.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const errorMsg = 'BAM!'; <ide> <del>// the first timer throws... <add>// The first timer throws... 
<ide> setTimeout(common.mustCall(function() { <ide> throw new Error(errorMsg); <ide> }), 1); <ide><path>test/parallel/test-tls-alpn-server-client.js <ide> function Test1() { <ide> checkResults(results[1], <ide> { server: { ALPN: 'b' }, <ide> client: { ALPN: 'b' } }); <del> // nothing is selected by ALPN <add> // Nothing is selected by ALPN <ide> checkResults(results[2], <ide> { server: { ALPN: false }, <ide> client: { ALPN: false } }); <ide> function Test2() { <ide> const clientsOptions = [{}, {}, {}]; <ide> <ide> runTest(clientsOptions, serverOptions, function(results) { <del> // nothing is selected by ALPN <add> // Nothing is selected by ALPN <ide> checkResults(results[0], <ide> { server: { ALPN: false }, <ide> client: { ALPN: false } }); <del> // nothing is selected by ALPN <add> // Nothing is selected by ALPN <ide> checkResults(results[1], <ide> { server: { ALPN: false }, <ide> client: { ALPN: false } }); <del> // nothing is selected by ALPN <add> // Nothing is selected by ALPN <ide> checkResults(results[2], <ide> { server: { ALPN: false }, <ide> client: { ALPN: false } }); <ide><path>test/parallel/test-tls-client-renegotiation-limit.js <ide> const fixtures = require('../common/fixtures'); <ide> // Renegotiation as a protocol feature was dropped after TLS1.2. <ide> tls.DEFAULT_MAX_VERSION = 'TLSv1.2'; <ide> <del>// renegotiation limits to test <add>// Renegotiation limits to test <ide> const LIMITS = [0, 1, 2, 3, 5, 10, 16]; <ide> <ide> { <ide> function test(next) { <ide> assert.strictEqual(hadErr, false); <ide> }); <ide> <del> // simulate renegotiation attack <add> // Simulate renegotiation attack <ide> function spam() { <ide> client.write(''); <ide> client.renegotiate({}, (err) => { <ide><path>test/parallel/test-tls-connect-memleak.js <ide> const gcListener = { ongc() { collected = true; } }; <ide> server.address().port, <ide> { rejectUnauthorized: false }, <ide> common.mustCall(() => { <del> assert.strictEqual(gcObject, gcObject); // keep reference alive <add> assert.strictEqual(gcObject, gcObject); // Keep reference alive <ide> assert.strictEqual(collected, false); <ide> setImmediate(done, sock); <ide> })); <ide><path>test/parallel/test-tls-ecdh-auto.js <ide> const options = { <ide> ecdhCurve: 'auto' <ide> }; <ide> <del>const reply = 'I AM THE WALRUS'; // something recognizable <add>const reply = 'I AM THE WALRUS'; // Something recognizable <ide> <ide> const server = tls.createServer(options, function(conn) { <ide> conn.end(reply); <ide><path>test/parallel/test-tls-ecdh-multiple.js <ide> const options = { <ide> ecdhCurve: 'secp256k1:prime256v1:secp521r1' <ide> }; <ide> <del>const reply = 'I AM THE WALRUS'; // something recognizable <add>const reply = 'I AM THE WALRUS'; // Something recognizable <ide> <ide> const server = tls.createServer(options, function(conn) { <ide> conn.end(reply); <ide><path>test/parallel/test-tls-ecdh.js <ide> const options = { <ide> ecdhCurve: 'prime256v1' <ide> }; <ide> <del>const reply = 'I AM THE WALRUS'; // something recognizable <add>const reply = 'I AM THE WALRUS'; // Something recognizable <ide> <ide> const server = tls.createServer(options, common.mustCall(function(conn) { <ide> conn.end(reply); <ide><path>test/parallel/test-tls-socket-close.js <ide> const tlsServer = tls.createServer({ cert, key }, (socket) => { <ide> let netSocket; <ide> // plain tcp server <ide> const netServer = net.createServer((socket) => { <del> // if client wants to use tls <add> // If client wants to use tls <ide> tlsServer.emit('connection', socket); <ide> <ide> netSocket = 
socket; <ide><path>test/parallel/test-url-fileurltopath.js <ide> assert.throws(() => url.fileURLToPath('https://a/b/c'), { <ide> let testCases; <ide> if (isWindows) { <ide> testCases = [ <del> // lowercase ascii alpha <add> // Lowercase ascii alpha <ide> { path: 'C:\\foo', fileURL: 'file:///C:/foo' }, <del> // uppercase ascii alpha <add> // Uppercase ascii alpha <ide> { path: 'C:\\FOO', fileURL: 'file:///C:/FOO' }, <ide> // dir <ide> { path: 'C:\\dir\\foo', fileURL: 'file:///C:/dir/foo' }, <ide> assert.throws(() => url.fileURLToPath('https://a/b/c'), { <ide> { path: 'C:\\foo\rbar', fileURL: 'file:///C:/foo%0Dbar' }, <ide> // latin1 <ide> { path: 'C:\\fóóbàr', fileURL: 'file:///C:/f%C3%B3%C3%B3b%C3%A0r' }, <del> // euro sign (BMP code point) <add> // Euro sign (BMP code point) <ide> { path: 'C:\\€', fileURL: 'file:///C:/%E2%82%AC' }, <ide> // Rocket emoji (non-BMP code point) <ide> { path: 'C:\\🚀', fileURL: 'file:///C:/%F0%9F%9A%80' } <ide> ]; <ide> } else { <ide> testCases = [ <del> // lowercase ascii alpha <add> // Lowercase ascii alpha <ide> { path: '/foo', fileURL: 'file:///foo' }, <del> // uppercase ascii alpha <add> // Uppercase ascii alpha <ide> { path: '/FOO', fileURL: 'file:///FOO' }, <ide> // dir <ide> { path: '/dir/foo', fileURL: 'file:///dir/foo' }, <ide> assert.throws(() => url.fileURLToPath('https://a/b/c'), { <ide> { path: '/foo\rbar', fileURL: 'file:///foo%0Dbar' }, <ide> // latin1 <ide> { path: '/fóóbàr', fileURL: 'file:///f%C3%B3%C3%B3b%C3%A0r' }, <del> // euro sign (BMP code point) <add> // Euro sign (BMP code point) <ide> { path: '/€', fileURL: 'file:///%E2%82%AC' }, <ide> // Rocket emoji (non-BMP code point) <ide> { path: '/🚀', fileURL: 'file:///%F0%9F%9A%80' }, <ide><path>test/parallel/test-url-format.js <ide> const formatTests = { <ide> pathname: '/fooA100%mBr', <ide> }, <ide> <del> // multiple `#` in search <add> // Multiple `#` in search <ide> 'http://example.com/?foo=bar%231%232%233&abc=%234%23%235#frag': { <ide> href: 'http://example.com/?foo=bar%231%232%233&abc=%234%23%235#frag', <ide> protocol: 'http:', <ide><path>test/parallel/test-url-parse-format.js <ide> const parseTests = { <ide> path: '/Y' <ide> }, <ide> <del> // whitespace in the front <add> // Whitespace in the front <ide> ' http://www.example.com/': { <ide> href: 'http://www.example.com/', <ide> protocol: 'http:', <ide> const parseTests = { <ide> href: 'http://a%0D%22%20%09%0A%3C\'b:b@c/%0D%0Ad/e?f' <ide> }, <ide> <del> // git urls used by npm <add> // Git urls used by npm <ide> 'git+ssh://[email protected]:npm/npm': { <ide> protocol: 'git+ssh:', <ide> slashes: true, <ide><path>test/parallel/test-url-pathtofileurl.js <ide> const url = require('url'); <ide> let testCases; <ide> if (isWindows) { <ide> testCases = [ <del> // lowercase ascii alpha <add> // Lowercase ascii alpha <ide> { path: 'C:\\foo', expected: 'file:///C:/foo' }, <del> // uppercase ascii alpha <add> // Uppercase ascii alpha <ide> { path: 'C:\\FOO', expected: 'file:///C:/FOO' }, <ide> // dir <ide> { path: 'C:\\dir\\foo', expected: 'file:///C:/dir/foo' }, <ide> const url = require('url'); <ide> { path: 'C:\\foo\rbar', expected: 'file:///C:/foo%0Dbar' }, <ide> // latin1 <ide> { path: 'C:\\fóóbàr', expected: 'file:///C:/f%C3%B3%C3%B3b%C3%A0r' }, <del> // euro sign (BMP code point) <add> // Euro sign (BMP code point) <ide> { path: 'C:\\€', expected: 'file:///C:/%E2%82%AC' }, <ide> // Rocket emoji (non-BMP code point) <ide> { path: 'C:\\🚀', expected: 'file:///C:/%F0%9F%9A%80' } <ide> ]; <ide> } else { <ide> testCases = [ <del> // lowercase 
ascii alpha <add> // Lowercase ascii alpha <ide> { path: '/foo', expected: 'file:///foo' }, <del> // uppercase ascii alpha <add> // Uppercase ascii alpha <ide> { path: '/FOO', expected: 'file:///FOO' }, <ide> // dir <ide> { path: '/dir/foo', expected: 'file:///dir/foo' }, <ide> const url = require('url'); <ide> { path: '/foo\rbar', expected: 'file:///foo%0Dbar' }, <ide> // latin1 <ide> { path: '/fóóbàr', expected: 'file:///f%C3%B3%C3%B3b%C3%A0r' }, <del> // euro sign (BMP code point) <add> // Euro sign (BMP code point) <ide> { path: '/€', expected: 'file:///%E2%82%AC' }, <ide> // Rocket emoji (non-BMP code point) <ide> { path: '/🚀', expected: 'file:///%F0%9F%9A%80' }, <ide><path>test/parallel/test-url-relative.js <ide> const assert = require('assert'); <ide> const inspect = require('util').inspect; <ide> const url = require('url'); <ide> <del>// when source is false <add>// When source is false <ide> assert.strictEqual(url.resolveObject('', 'foo'), 'foo'); <ide> <ide> /* <ide> const relativeTests2 = [ <ide> ['g/', bases[0], 'http://a/b/c/g/'], <ide> ['/g', bases[0], 'http://a/g'], <ide> ['//g', bases[0], 'http://g/'], <del> // changed with RFC 2396bis <add> // Changed with RFC 2396bis <ide> // ('?y', bases[0], 'http://a/b/c/d;p?y'], <ide> ['?y', bases[0], 'http://a/b/c/d;p?y'], <ide> ['g?y', bases[0], 'http://a/b/c/g?y'], <del> // changed with RFC 2396bis <add> // Changed with RFC 2396bis <ide> // ('#s', bases[0], CURRENT_DOC_URI + '#s'], <ide> ['#s', bases[0], 'http://a/b/c/d;p?q#s'], <ide> ['g#s', bases[0], 'http://a/b/c/g#s'], <ide> ['g?y#s', bases[0], 'http://a/b/c/g?y#s'], <ide> [';x', bases[0], 'http://a/b/c/;x'], <ide> ['g;x', bases[0], 'http://a/b/c/g;x'], <ide> ['g;x?y#s', bases[0], 'http://a/b/c/g;x?y#s'], <del> // changed with RFC 2396bis <add> // Changed with RFC 2396bis <ide> // ('', bases[0], CURRENT_DOC_URI], <ide> ['', bases[0], 'http://a/b/c/d;p?q'], <ide> ['.', bases[0], 'http://a/b/c/'], <ide> const relativeTests2 = [ <ide> ['../../g', bases[0], 'http://a/g'], <ide> ['../../../g', bases[0], ('http://a/../g', 'http://a/g')], <ide> ['../../../../g', bases[0], ('http://a/../../g', 'http://a/g')], <del> // changed with RFC 2396bis <add> // Changed with RFC 2396bis <ide> // ('/./g', bases[0], 'http://a/./g'], <ide> ['/./g', bases[0], 'http://a/g'], <del> // changed with RFC 2396bis <add> // Changed with RFC 2396bis <ide> // ('/../g', bases[0], 'http://a/../g'], <ide> ['/../g', bases[0], 'http://a/g'], <ide> ['g.', bases[0], 'http://a/b/c/g.'], <ide> const relativeTests2 = [ <ide> ['g/', bases[1], 'http://a/b/c/g/'], <ide> ['/g', bases[1], 'http://a/g'], <ide> ['//g', bases[1], 'http://g/'], <del> // changed in RFC 2396bis <add> // Changed in RFC 2396bis <ide> // ('?y', bases[1], 'http://a/b/c/?y'], <ide> ['?y', bases[1], 'http://a/b/c/d;p?y'], <ide> ['g?y', bases[1], 'http://a/b/c/g?y'], <ide> const relativeTests2 = [ <ide> ['g', bases[3], 'fred:///s//a/b/g'], <ide> ['./g', bases[3], 'fred:///s//a/b/g'], <ide> ['g/', bases[3], 'fred:///s//a/b/g/'], <del> ['/g', bases[3], 'fred:///g'], // may change to fred:///s//a/g <del> ['//g', bases[3], 'fred://g'], // may change to fred:///s//g <del> ['//g/x', bases[3], 'fred://g/x'], // may change to fred:///s//g/x <add> ['/g', bases[3], 'fred:///g'], // May change to fred:///s//a/g <add> ['//g', bases[3], 'fred://g'], // May change to fred:///s//g <add> ['//g/x', bases[3], 'fred://g/x'], // May change to fred:///s//g/x <ide> ['///g', bases[3], 'fred:///g'], <ide> ['./', bases[3], 'fred:///s//a/b/'], <ide> ['../', bases[3], 
'fred:///s//a/'], <ide> const relativeTests2 = [ <ide> ['g', bases[4], 'http:///s//a/b/g'], <ide> ['./g', bases[4], 'http:///s//a/b/g'], <ide> ['g/', bases[4], 'http:///s//a/b/g/'], <del> ['/g', bases[4], 'http:///g'], // may change to http:///s//a/g <del> ['//g', bases[4], 'http://g/'], // may change to http:///s//g <del> ['//g/x', bases[4], 'http://g/x'], // may change to http:///s//g/x <add> ['/g', bases[4], 'http:///g'], // May change to http:///s//a/g <add> ['//g', bases[4], 'http://g/'], // May change to http:///s//g <add> ['//g/x', bases[4], 'http://g/x'], // May change to http:///s//g/x <ide> ['///g', bases[4], 'http:///g'], <ide> ['./', bases[4], 'http:///s//a/b/'], <ide> ['../', bases[4], 'http:///s//a/'], <ide><path>test/parallel/test-util-callbackify.js <ide> const values = [ <ide> } <ide> })); <ide> <del> // test a Promise factory <add> // Test a Promise factory <ide> function promiseFn() { <ide> return Promise.reject(value); <ide> } <ide><path>test/parallel/test-util-inspect.js <ide> assert.strictEqual(util.inspect('"\'${a}'), "'\"\\'${a}'"); <ide> assert.strictEqual(util.inspect(date), <ide> '{ CustomDate 2010-02-14T11:48:40.000Z foo: \'bar\' }'); <ide> <del> // check for null prototype <add> // Check for null prototype <ide> Object.setPrototypeOf(date, null); <ide> assert.strictEqual(util.inspect(date), <ide> '{ [Date: null prototype] 2010-02-14T11:48:40.000Z' + <ide> assert.strictEqual(util.inspect('"\'${a}'), "'\"\\'${a}'"); <ide> assert.strictEqual(util.inspect(date), <ide> '{ CustomDate Invalid Date foo: \'bar\' }'); <ide> <del> // check for null prototype <add> // Check for null prototype <ide> Object.setPrototypeOf(date, null); <ide> assert.strictEqual(util.inspect(date), <ide> '{ [Date: null prototype] Invalid Date foo: \'bar\' }'); <ide><path>test/parallel/test-util-isDeepStrictEqual.js <ide> notUtilIsDeepStrict( <ide> new Map([['1', 5], [0, 5], ['0', 5]]) <ide> ); <ide> <del>// undefined value in Map <add>// Undefined value in Map <ide> utilIsDeepStrict( <ide> new Map([[1, undefined]]), <ide> new Map([[1, undefined]]) <ide><path>test/parallel/test-v8-coverage.js <ide> function nextdir() { <ide> assert.strictEqual(output.stderr.toString(), ''); <ide> const fixtureCoverage = getFixtureCoverage('basic.js', coverageDirectory); <ide> assert.ok(fixtureCoverage); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[0].count, 1); <ide> // Second branch did not execute. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[1].count, 0); <ide> function nextdir() { <ide> assert.strictEqual(output.stderr.toString(), ''); <ide> const fixtureCoverage = getFixtureCoverage('exit-1.js', coverageDirectory); <ide> assert.ok(fixtureCoverage, 'coverage not found for file'); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[0].count, 1); <ide> // Second branch did not execute. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[1].count, 0); <ide> function nextdir() { <ide> assert.strictEqual(output.stderr.toString(), ''); <ide> const fixtureCoverage = getFixtureCoverage('sigint.js', coverageDirectory); <ide> assert.ok(fixtureCoverage); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[0].count, 1); <ide> // Second branch did not execute. 
<ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[1].count, 0); <ide> function nextdir() { <ide> const fixtureCoverage = getFixtureCoverage('subprocess.js', <ide> coverageDirectory); <ide> assert.ok(fixtureCoverage); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[1].ranges[0].count, 1); <ide> // Second branch did not execute. <ide> assert.strictEqual(fixtureCoverage.functions[1].ranges[1].count, 0); <ide> } <ide> <del>// outputs coverage from worker. <add>// Outputs coverage from worker. <ide> { <ide> const coverageDirectory = path.join(tmpdir.path, nextdir()); <ide> const output = spawnSync(process.execPath, [ <ide> function nextdir() { <ide> const fixtureCoverage = getFixtureCoverage('subprocess.js', <ide> coverageDirectory); <ide> assert.ok(fixtureCoverage); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[1].ranges[0].count, 1); <ide> // Second branch did not execute. <ide> assert.strictEqual(fixtureCoverage.functions[1].ranges[1].count, 0); <ide> function nextdir() { <ide> const fixtureCoverage = getFixtureCoverage('async-hooks.js', <ide> coverageDirectory); <ide> assert.ok(fixtureCoverage); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[0].count, 1); <ide> } <ide> <ide> function nextdir() { <ide> const fixtureCoverage = getFixtureCoverage('basic.js', <ide> absoluteCoverageDirectory); <ide> assert.ok(fixtureCoverage); <del> // first branch executed. <add> // First branch executed. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[0].count, 1); <ide> // Second branch did not execute. <ide> assert.strictEqual(fixtureCoverage.functions[0].ranges[1].count, 0); <ide><path>test/parallel/test-vm-global-property-interceptors.js <ide> assert.deepEqual(result, { <ide> g: undefined <ide> }); <ide> <del>// define new properties <add>// Define new properties <ide> vm.runInContext(` <ide> Object.defineProperty(this, 'h', {value: 'h'}); <ide> Object.defineProperty(this, 'i', {}); <ide><path>test/parallel/test-whatwg-url-custom-properties.js <ide> const assert = require('assert'); <ide> const urlToOptions = require('internal/url').urlToOptions; <ide> <ide> const url = new URL('http://user:[email protected]:21/aaa/zzz?l=24#test'); <del>const oldParams = url.searchParams; // for test of [SameObject] <add>const oldParams = url.searchParams; // For test of [SameObject] <ide> <ide> // To retrieve enumerable but not necessarily own properties, <ide> // we need to use the for-in loop. <ide><path>test/parallel/test-zlib-invalid-input.js <ide> unzips.forEach(common.mustCall((uz, i) => { <ide> uz.on('error', common.mustCall()); <ide> uz.on('end', common.mustNotCall); <ide> <del> // this will trigger error event <add> // This will trigger error event <ide> uz.write('this is not valid compressed data.'); <ide> }, unzips.length)); <ide><path>test/parallel/test-zlib-random-byte-pipes.js <ide> class RandomReadStream extends Stream { <ide> // base block size. 
<ide> opt.block = opt.block || 256 * 1024; <ide> <del> // total number of bytes to emit <add> // Total number of bytes to emit <ide> opt.total = opt.total || 256 * 1024 * 1024; <ide> this._remaining = opt.total; <ide> <ide><path>test/parallel/test-zlib-truncated.js <ide> const errMessage = /unexpected end of file/; <ide> assert.strictEqual(toUTF8(result), inputString); <ide> }); <ide> <del> // sync truncated input test <add> // Sync truncated input test <ide> assert.throws(function() { <ide> zlib[methods.decompSync](truncated); <ide> }, errMessage); <ide> <del> // async truncated input test <add> // Async truncated input test <ide> zlib[methods.decomp](truncated, function(err, result) { <ide> assert(errMessage.test(err.message)); <ide> }); <ide><path>test/parallel/test-zlib.js <ide> let trickle = [128, 1024, 1024 * 1024]; <ide> // several different chunk sizes <ide> let chunkSize = [128, 1024, 1024 * 16, 1024 * 1024]; <ide> <del>// this is every possible value. <add>// This is every possible value. <ide> let level = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; <ide> let windowBits = [8, 9, 10, 11, 12, 13, 14, 15]; <ide> let memLevel = [1, 2, 3, 4, 5, 6, 7, 8, 9]; <ide> testFiles.forEach(common.mustCall((file) => { <ide> }, testFiles.length)); <ide> <ide> <del>// stream that saves everything <add>// Stream that saves everything <ide> class BufferStream extends stream.Stream { <ide> constructor() { <ide> super(); <ide> testKeys.forEach(common.mustCall((file) => { <ide> } <ide> })); <ide> <del> // the magic happens here. <add> // The magic happens here. <ide> ss.pipe(def).pipe(inf).pipe(buf); <ide> ss.end(test); <ide> }, zlibPairs.length)); <ide><path>test/pummel/test-fs-watch-file.js <ide> const filenameTwo = 'hasOwnProperty'; <ide> const filepathTwo = filenameTwo; <ide> const filepathTwoAbs = path.join(testDir, filenameTwo); <ide> <del>const filenameThree = 'charm'; // because the third time is <add>const filenameThree = 'charm'; // Because the third time is <ide> <ide> const filenameFour = 'get'; <ide> <ide><path>test/pummel/test-timers.js <ide> setTimeout(common.mustCall(function() { <ide> assert.strictEqual(1000 - WINDOW < diff && diff < 1000 + WINDOW, true); <ide> }), 1000); <ide> <del>// this timer shouldn't execute <add>// This timer shouldn't execute <ide> const id = setTimeout(function() { assert.strictEqual(true, false); }, 500); <ide> clearTimeout(id); <ide> <ide><path>test/sequential/test-child-process-emfile.js <ide> proc.on('error', common.mustCall(function(err) { <ide> <ide> proc.on('exit', common.mustNotCall('"exit" event should not be emitted')); <ide> <del>// close one fd for LSan <add>// Close one fd for LSan <ide> if (openFds.length >= 1) { <ide> fs.closeSync(openFds.pop()); <ide> } <ide><path>test/sequential/test-cli-syntax-bad.js <ide> const syntaxErrorRE = /^SyntaxError: \b/m; <ide> assert.strictEqual(err.code, 1, <ide> `code ${err.code} !== 1 for error:\n\n${err}`); <ide> <del> // no stdout should be produced <add> // No stdout should be produced <ide> assert.strictEqual(stdout, ''); <ide> <ide> // Stderr should have a syntax error message <ide><path>test/sequential/test-cli-syntax-file-not-found.js <ide> const notFoundRE = /^Error: Cannot find module/m; <ide> const _args = args.concat(file); <ide> const cmd = [node, ..._args].join(' '); <ide> exec(cmd, common.mustCall((err, stdout, stderr) => { <del> // no stdout should be produced <add> // No stdout should be produced <ide> assert.strictEqual(stdout, ''); <ide> <ide> // `stderr` should have a module not found error message. 
<ide><path>test/sequential/test-cli-syntax-require.js <ide> const node = process.execPath; <ide> // depending on the JavaScript engine. <ide> const syntaxErrorRE = /^SyntaxError: \b/m; <ide> <del>// should work with -r flags <add>// Should work with -r flags <ide> ['-c', '--check'].forEach(function(checkFlag) { <ide> ['-r', '--require'].forEach(function(requireFlag) { <ide> const preloadFile = fixtures.path('no-wrapper.js'); <ide> const syntaxErrorRE = /^SyntaxError: \b/m; <ide> assert.strictEqual(err.code, 1, <ide> `code ${err.code} !== 1 for error:\n\n${err}`); <ide> <del> // no stdout should be produced <add> // No stdout should be produced <ide> assert.strictEqual(stdout, ''); <ide> <ide> // stderr should have a syntax error message <ide><path>test/sequential/test-http-keepalive-maxsockets.js <ide> server.listen(0, function() { <ide> assert.strictEqual(count(agent.sockets), 0); <ide> assert.strictEqual(serverSockets.length, 5); <ide> <del> // now make 10 more reqs. <add> // Now make 10 more reqs. <ide> // should use the 2 free reqs from the pool first. <ide> makeReqs(10, function(er) { <ide> assert.ifError(er); <ide><path>test/sequential/test-stream2-stderr-sync.js <ide> function child0() { <ide> console.error('baz'); <ide> } <ide> <del>// using process.stderr <add>// Using process.stderr <ide> function child1() { <ide> process.stderr.write('child 1\n'); <ide> process.stderr.write('foo\n'); <ide><path>tools/eslint-rules/required-modules.js <ide> const path = require('path'); <ide> //------------------------------------------------------------------------------ <ide> <ide> module.exports = function(context) { <del> // trim required module names <add> // Trim required module names <ide> const requiredModules = context.options; <ide> const isESM = context.parserOptions.sourceType === 'module'; <ide> <ide><path>tools/test-npm-package.js <ide> const nodePath = path.dirname(process.execPath); <ide> <ide> function spawnCopyDeepSync(source, destination) { <ide> if (common.isWindows) { <del> mkdirSync(destination); // prevent interactive prompt <add> mkdirSync(destination); // Prevent interactive prompt <ide> return spawnSync('xcopy.exe', ['/E', source, destination]); <ide> } else { <ide> return spawnSync('cp', ['-r', `${source}/`, destination]);
232
Python
Python
correct os assignment in _to_node()
d41d5cf9b7d07780e15ea9f9ca86ce744fbb7a87
<ide><path>libcloud/container/drivers/kubernetes.py <ide> def _to_node(self, data): <ide> extra=extra_size, <ide> ) <ide> extra = {"memory": memory, "cpu": cpu} <del> labels = data["metadata"]["labels"] <del> os = labels.get("beta.kubernetes.io/os") or labels.get("kubernetes.io/os") <del> if os: <del> extra["os"] = os <add> extra["os"] = data["status"]["nodeInfo"]["operatingSystem"] <ide> # TODO: Find state <ide> state = NodeState.UNKNOWN <ide> public_ips, private_ips = [], []
1
Javascript
Javascript
use strict equality comparison
9c460e10d10570c6783fd51e885a50ea99464943
<ide><path>lib/internal/v8_prof_polyfill.js <ide> const os = { <ide> // Filter out vdso and vsyscall entries. <ide> const arg = args[args.length - 1]; <ide> if (arg === '[vdso]' || <del> arg == '[vsyscall]' || <add> arg === '[vsyscall]' || <ide> /^[0-9a-f]+-[0-9a-f]+$/.test(arg)) { <ide> return ''; <ide> } <ide><path>lib/punycode.js <ide> const encode = function(input) { <ide> if (currentValue < n && ++delta > maxInt) { <ide> error('overflow'); <ide> } <del> if (currentValue == n) { <add> if (currentValue === n) { <ide> // Represent delta as a generalized variable-length integer. <ide> let q = delta; <ide> for (let k = base; /* no condition */; k += base) { <ide> const encode = function(input) { <ide> } <ide> <ide> output.push(stringFromCharCode(digitToBasic(q, 0))); <del> bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); <add> bias = adapt(delta, handledCPCountPlusOne, handledCPCount === basicLength); <ide> delta = 0; <ide> ++handledCPCount; <ide> }
2
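The patch above replaces loose equality with strict equality in Node's internals. A minimal sketch of the coercion pitfall that `===` avoids; the sample values are illustrative and not taken from the patch.

```js
'use strict';

// With string operands the two operators agree.
const arg = '[vsyscall]';
console.log(arg == '[vsyscall]');   // true
console.log(arg === '[vsyscall]');  // true

// With mixed types, loose equality coerces before comparing and can hide bugs.
const count = 0;
console.log(count == '');   // true:  '' is coerced to the number 0
console.log(count === '');  // false: different types, no coercion
```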
Javascript
Javascript
enable tests that are now passing
c43cb492c073b079a6a69fc71a2c1e42ff837d83
<ide><path>packages/ember-htmlbars/tests/helpers/input_test.js <ide> QUnit.test("cursor position is not lost when updating content", function() { <ide> input.selectionStart = 3; <ide> input.selectionEnd = 3; <ide> }); <del> <ide> run(null, set, controller, 'val', 'derp'); <ide> <ide> equal(view.$('input').val(), "derp", "updates text field after value changes"); <ide> QUnit.module("{{input type='text'}} - null/undefined values", { <ide> } <ide> }); <ide> <del>QUnit.skip("placeholder attribute bound to undefined is not present", function() { <add>QUnit.test("placeholder attribute bound to undefined is not present", function() { <ide> view = View.extend({ <ide> container: container, <ide> controller: {}, <ide><path>packages/ember-template-compiler/tests/system/compile_test.js <ide> QUnit.test('the template revision is different than the HTMLBars default revisio <ide> ok(actual.revision !== expected.revision, 'revision differs from default'); <ide> }); <ide> <del>QUnit.skip('{{with}} template deprecation includes moduleName if provided', function() { <add>QUnit.test('{{with}} template deprecation includes moduleName if provided', function() { <ide> var templateString = "{{#with foo as bar}} {{bar}} {{/with}}"; <ide> <ide> expectDeprecation(function() {
2
Text
Text
add more lts update steps to release guide
e17e903188b30a1acd3a65594dc5a1847b24ac73
<ide><path>doc/contributing/releases.md <ide> existing labels for that release line, such as `vN.x`. <ide> If the release is transitioning from Active LTS to Maintenance, the <ide> `backport-requested-vN.x` label must be deleted. <ide> <add>### Add new codename to nodejs-latest-linker <add> <add>In order to make sure a download URL <add>(e.g: <https://nodejs.org/download/release/latest-codename/>) will be available <add>for the new LTS release line you need to submit a PR to <add><https://github.com/nodejs/nodejs-latest-linker> and add a new entry for the <add>new LTS codename in its `ltsNames` map located in the `./latest-linker.js` <add>file. <add> <add>Make sure to reach out to the Build WG in order to validate that the new URL is <add>available as part of the LTS release promotion. <add> <add>### Update Release repo info <add> <add>Add the new LTS codename to the release schedule table located in the <add>`./README.md` file located at the <https://github.com/nodejs/Release> <add>repository along with the addition of the new codename to the `./schedule.json` <add>file in that same repo. <add> <ide> ## Major releases <ide> <ide> The process for cutting a new Node.js major release has a number of differences
1
Ruby
Ruby
fix error message when cask fails to install
3088faaf9c1bea9fdbf923a05a6aea984d32fedd
<ide><path>Library/Homebrew/cask/lib/hbc/installer.rb <ide> def install_artifacts <ide> end <ide> rescue StandardError => e <ide> begin <del> ofail e.message <ide> already_installed_artifacts.each do |artifact| <ide> odebug "Reverting installation of artifact of class #{artifact}" <ide> artifact.new(@cask, options).uninstall_phase <ide> end <ide> ensure <ide> purge_versioned_files <del> raise e.class, "An error occured during installation of Cask #{@cask}: #{e.message}" <add> raise e <ide> end <ide> end <ide>
1
Text
Text
clarify collaborators & ctc members relationships
accaa3437773b6f5b6a4853f4c4a1ed568379490
<ide><path>README.md <ide> more information about the governance of the Node.js project, see <ide> * [zkat](https://github.com/zkat) - <ide> **Kat Marchán** &lt;[email protected]&gt; <ide> <del>Collaborators & CTC members follow the [COLLABORATOR_GUIDE.md](./COLLABORATOR_GUIDE.md) in <del>maintaining the Node.js project. <add>Collaborators (which includes CTC members) follow the <add>[COLLABORATOR_GUIDE.md](./COLLABORATOR_GUIDE.md) in maintaining the Node.js <add>project. <ide> <ide> ### Release Team <ide>
1
Text
Text
improve http.request documentation
27ee08363b0465e2a4fbe1833f31b049a42f90a0
<ide><path>doc/api/http.md <ide> const req = http.request(options, (res) => { <ide> }); <ide> ``` <ide> <add>In a successful request, the following events will be emitted in the following <add>order: <add> <add>* `socket` <add>* `response` <add> * `data` any number of times, on the `res` object <add> (`data` will not be emitted at all if the response body is empty, for <add> instance, in most redirects) <add> * `end` on the `res` object <add>* `close` <add> <add>In the case of a connection error, the following events will be emitted: <add> <add>* `socket` <add>* `error` <add>* `close` <add> <add>If `req.abort()` is called before the connection succeeds, the following events <add>will be emitted in the following order: <add> <add>* `socket` <add>* (`req.abort()` called here) <add>* `abort` <add>* `close` <add>* `error` with an error with message `Error: socket hang up` and code <add> `ECONNRESET` <add> <add>If `req.abort()` is called after the response is received, the following events <add>will be emitted in the following order: <add> <add>* `socket` <add>* `response` <add> * `data` any number of times, on the `res` object <add>* (`req.abort()` called here) <add>* `abort` <add>* `close` <add> * `aborted` on the `res` object <add> * `end` on the `res` object <add> * `close` on the `res` object <add> <add>Note that setting the `timeout` option or using the `setTimeout` function will <add>not abort the request or do anything besides add a `timeout` event. <add> <ide> [`'checkContinue'`]: #http_event_checkcontinue <ide> [`'request'`]: #http_event_request <ide> [`'response'`]: #http_event_response
1
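The documentation added in the patch above enumerates the event order for an HTTP client request. A minimal sketch that wires up listeners for those events; the target host, port and path are assumptions for illustration and are not part of the patch.

```js
'use strict';
const http = require('http');

// Expected order for a successful request, per the added docs:
// 'socket' -> 'response' -> 'data' (zero or more) -> 'end' -> 'close'.
const req = http.request({ host: 'localhost', port: 8080, path: '/' }, (res) => {
  res.on('data', (chunk) => console.log(`data: ${chunk.length} bytes`));
  res.on('end', () => console.log('end'));
});

req.on('socket', () => console.log('socket'));
req.on('close', () => console.log('close'));
// On a connection error only 'socket', 'error' and 'close' are emitted.
req.on('error', (err) => console.error('error:', err.message));
req.end();
```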
Python
Python
update dummy compute driver
68a51380d7b9a06149d55be97a11ce891c6b0418
<ide><path>libcloud/compute/drivers/dummy.py <ide> import socket <ide> import struct <ide> <del>from libcloud.base import ConnectionKey, NodeDriver, NodeSize, NodeLocation <del>from libcloud.compute.base import NodeImage, Node <add>from libcloud.common.base import ConnectionKey <add>from libcloud.compute.base import NodeImage, NodeSize, Node <add>from libcloud.compute.base import NodeDriver, NodeLocation <ide> from libcloud.compute.types import Provider,NodeState <ide> <ide> class DummyConnection(ConnectionKey): <ide> def list_nodes(self): <ide> As more nodes are added, list_nodes will return them <ide> <ide> >>> node=driver.create_node() <add> >>> node.size.id <add> 's1' <add> >>> node.image.id <add> 'i2' <ide> >>> sorted([node.name for node in driver.list_nodes()]) <ide> ['dummy-1', 'dummy-2', 'dummy-3'] <ide> """ <ide> def create_node(self, **kwargs): <ide> public_ip=['127.0.0.%d' % l], <ide> private_ip=[], <ide> driver=self, <add> size=NodeSize(id='s1', name='foo', ram=2048, <add> disk=160, bandwidth=None, price=0.0, <add> driver=self), <add> image=NodeImage(id='i2', name='image', driver=self), <ide> extra={'foo': 'bar'}) <ide> self.nl.append(n) <ide> return n
1
Text
Text
update suse docs
5d6bb52976456d35e3d3a725b8c93992b3e69f31
<ide><path>docs/installation/SUSE.md <ide> parent = "smn_linux" <ide> +++ <ide> <![end-metadata]--> <ide> <del># openSUSE <add># openSUSE and SUSE Linux Enterprise <ide> <del>Docker is available in **openSUSE 12.3 and later**. Please note that due <del>to its current limitations Docker is able to run only **64 bit** architecture. <add>This page provides instructions for installing and configuring the lastest <add>Docker Engine software on openSUSE and SUSE systems. <ide> <del>Docker is not part of the official repositories of openSUSE 12.3 and <del>openSUSE 13.1. Hence it is necessary to add the [Virtualization <del>repository](https://build.opensuse.org/project/show/Virtualization) from <del>[OBS](https://build.opensuse.org/) to install the `docker` package. <add>>**Note:** You can also find bleeding edge Docker versions inside of the repositories maintained by the [Virtualization:containers project](https://build.opensuse.org/project/show/Virtualization:containers) on the [Open Build Service](https://build.opensuse.org/). This project delivers also other packages that are related with the Docker ecosystem (for example, Docker Compose). <ide> <del>Execute one of the following commands to add the Virtualization repository: <add>## Prerequisites <ide> <del> # openSUSE 12.3 <del> $ sudo zypper ar -f http://download.opensuse.org/repositories/Virtualization/openSUSE_12.3/ Virtualization <add>You must be running a 64 bit architecture. <ide> <del> # openSUSE 13.1 <del> $ sudo zypper ar -f http://download.opensuse.org/repositories/Virtualization/openSUSE_13.1/ Virtualization <add>## openSUSE <ide> <del>No extra repository is required for openSUSE 13.2 and later. <add>Docker is part of the official openSUSE repositories starting from 13.2. No <add>additional repository is required on your system. <ide> <del># SUSE Linux Enterprise <add>## SUSE Linux Enterprise <ide> <del>Docker is available in **SUSE Linux Enterprise 12 and later**. Please note that <del>due to its current limitations Docker is able to run only on **64 bit** <del>architecture. <add>Docker is officially supported on SUSE Linux Enterprise 12 and later. You can find the latest supported Docker packages inside the `Container` module. To enable this module, do the following: <ide> <del>## Installation <add>1. Start YaST, and select *Software > Software Repositories*. <add>2. Click *Add* to open the add-on dialog. <add>3. Select *Extensions and Module from Registration Server* and click *Next*. <add>4. From the list of available extensions and modules, select *Container Module* and click *Next*. <add> The containers module and its repositories are added to your system. <add>5. If you use Subscription Management Tool, update the list of repositories at the SMT server. <ide> <del>Install the Docker package. <add>Otherwise execute the following command: <ide> <del> $ sudo zypper in docker <add> $ sudo SUSEConnect -p sle-module-containers/12/x86_64 -r '' <ide> <del>Now that it's installed, let's start the Docker daemon. <add> >**Note:** currently the `-r ''` flag is required to avoid a known limitation of `SUSEConnect`. <ide> <del> $ sudo systemctl start docker <add>The [Virtualization:containers project](https://build.opensuse.org/project/show/Virtualization:containers) <add>on the [Open Build Service](https://build.opensuse.org/) contains also bleeding <add>edge Docker packages for SUSE Linux Enterprise. However these packages are <add>**not supported** by SUSE. 
<ide> <del>If we want Docker to start at boot, we should also: <add>### Install Docker <ide> <del> $ sudo systemctl enable docker <add>1. Install the Docker package: <ide> <del>The docker package creates a new group named docker. Users, other than <del>root user, need to be part of this group in order to interact with the <del>Docker daemon. You can add users with: <add> $ sudo zypper in docker <ide> <del> $ sudo /usr/sbin/usermod -a -G docker <username> <add>2. Start the Docker daemon. <ide> <del>To verify that everything has worked as expected: <add> $ sudo systemctl start docker <ide> <del> $ sudo docker run --rm -i -t opensuse /bin/bash <add>3. Test the Docker installation. <ide> <del>This should download and import the `opensuse` image, and then start `bash` in <del>a container. To exit the container type `exit`. <add> $ sudo docker run hello-world <ide> <del>If you want your containers to be able to access the external network you must <del>enable the `net.ipv4.ip_forward` rule. <del>This can be done using YaST by browsing to the <del>`System -> Network Settings -> Routing` menu (for openSUSE Tumbleweed and later) or `Network Devices -> Network Settings -> Routing` menu (for SUSE Linux Enterprise 12 and previous openSUSE versions) and ensuring that the `Enable IPv4 Forwarding` box is checked. <add>## Configure Docker boot options <ide> <del>This option cannot be changed when networking is handled by the Network Manager. <del>In such cases the `/etc/sysconfig/SuSEfirewall2` file needs to be edited by <del>hand to ensure the `FW_ROUTE` flag is set to `yes` like so: <add>You can use these steps on openSUSE or SUSE Linux Enterprise. To start the `docker daemon` at boot, set the following: <ide> <del> FW_ROUTE="yes" <add> $ sudo systemctl enable docker <add> <add>The `docker` package creates a new group named `docker`. Users, other than <add>`root` user, must be part of this group to interact with the <add>Docker daemon. You can add users with this command syntax: <add> <add> sudo /usr/sbin/usermod -a -G docker <username> <ide> <add>Once you add a user, make sure they relog to pick up these new permissions. <ide> <del>**Done!** <add>## Enable external network access <add> <add>If you want your containers to be able to access the external network, you must <add>enable the `net.ipv4.ip_forward` rule. To do this, use YaST. <add> <add>For openSUSE Tumbleweed and later, browse to the **System -> Network Settings -> Routing** menu. For SUSE Linux Enterprise 12 and previous openSUSE versions, browse to **Network Devices -> Network Settings -> Routing** menu (f) and check the *Enable IPv4 Forwarding* box. <add> <add>When networking is handled by the Network Manager, instead of YaST you must edit <add>the `/etc/sysconfig/SuSEfirewall2` file needs by hand to ensure the `FW_ROUTE` <add>flag is set to `yes` like so: <add> <add> FW_ROUTE="yes" <ide> <ide> ## Custom daemon options <ide> <ide> If you need to add an HTTP Proxy, set a different directory or partition for the <del>Docker runtime files, or make other customizations, read our systemd article to <add>Docker runtime files, or make other customizations, read the systemd article to <ide> learn how to [customize your systemd Docker daemon options](/articles/systemd/). 
<ide> <ide> ## Uninstallation <ide> To uninstall the Docker package: <ide> <ide> $ sudo zypper rm docker <ide> <del>The above command will not remove images, containers, volumes, or user created <add>The above command does not remove images, containers, volumes, or user created <ide> configuration files on your host. If you wish to delete all images, containers, <ide> and volumes run the following command: <ide> <ide> $ rm -rf /var/lib/docker <ide> <ide> You must delete the user created configuration files manually. <ide> <del>## What's next <add>## Where to go from here <ide> <del>Continue with the [User Guide](/userguide/). <add>You can find more details about Docker on openSUSE or SUSE Linux Enterprise in <add>the [Docker quick start guide](https://www.suse.com/documentation/sles-12/dockerquick/data/dockerquick. <add>html) on the SUSE website. The document targets SUSE Linux Enterprise, but its contents apply also to openSUSE. <ide> <add>Continue to the [User Guide](/userguide/).
1
PHP
PHP
add cookie helper
c8257cd9daad692a35087d8e22629f1c32bd1045
<ide><path>src/Illuminate/Foundation/helpers.php <ide> function config($key, $default = null) <ide> } <ide> } <ide> <add>if ( ! function_exists('cookie')) <add>{ <add> /** <add> * Create a new cookie instance. <add> * <add> * @param string $name <add> * @param string $value <add> * @param int $minutes <add> * @param string $path <add> * @param string $domain <add> * @param bool $secure <add> * @param bool $httpOnly <add> * @return \Symfony\Component\HttpFoundation\Cookie <add> */ <add> function cookie($name, $value, $minutes = 0, $path = null, $domain = null, $secure = false, $httpOnly = true) <add> { <add> return app('Illuminate\Contracts\Cookie\Factory')->make($name, $value, $minutes, $path, $domain, $secure, $httpOnly); <add> } <add>} <add> <ide> if ( ! function_exists('csrf_token')) <ide> { <ide> /**
1
Mixed
Javascript
add updatesettings to both http2 servers
629e1ab5aa84b75cd26ee9208c909eff500a3a88
<ide><path>doc/api/http2.md <ide> A value of `0` will disable the timeout behavior on incoming connections. <ide> The socket timeout logic is set up on connection, so changing this <ide> value only affects new connections to the server, not any existing connections. <ide> <add>#### `server.updateSettings([settings])` <add><!-- YAML <add>added: REPLACEME <add>--> <add> <add>* `settings` {HTTP/2 Settings Object} <add> <add>Used to update the server with the provided settings. <add> <add>Throws `ERR_HTTP2_INVALID_SETTING_VALUE` for invalid `settings` values. <add> <add>Throws `ERR_INVALID_ARG_TYPE` for invalid `settings` argument. <add> <ide> ### Class: `Http2SecureServer` <ide> <!-- YAML <ide> added: v8.4.0 <ide> A value of `0` will disable the timeout behavior on incoming connections. <ide> The socket timeout logic is set up on connection, so changing this <ide> value only affects new connections to the server, not any existing connections. <ide> <add>#### `server.updateSettings([settings])` <add><!-- YAML <add>added: REPLACEME <add>--> <add> <add>* `settings` {HTTP/2 Settings Object} <add> <add>Used to update the server with the provided settings. <add> <add>Throws `ERR_HTTP2_INVALID_SETTING_VALUE` for invalid `settings` values. <add> <add>Throws `ERR_INVALID_ARG_TYPE` for invalid `settings` argument. <add> <ide> ### `http2.createServer(options[, onRequestHandler])` <ide> <!-- YAML <ide> added: v8.4.0 <ide><path>lib/internal/http2/core.js <ide> class Http2SecureServer extends TLSServer { <ide> } <ide> return this; <ide> } <add> <add> updateSettings(settings) { <add> assertIsObject(settings, 'settings'); <add> validateSettings(settings); <add> this[kOptions].settings = { ...this[kOptions].settings, ...settings }; <add> } <ide> } <ide> <ide> class Http2Server extends NETServer { <ide> class Http2Server extends NETServer { <ide> } <ide> return this; <ide> } <add> <add> updateSettings(settings) { <add> assertIsObject(settings, 'settings'); <add> validateSettings(settings); <add> this[kOptions].settings = { ...this[kOptions].settings, ...settings }; <add> } <ide> } <ide> <ide> Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function( <ide><path>test/parallel/test-http2-update-settings.js <add>'use strict'; <add> <add>// This test ensures that the Http2SecureServer and Http2Server <add>// settings are updated when the setting object is valid. <add>// When the setting object is invalid, this test ensures that <add>// updateSettings throws an exception. 
<add> <add>const common = require('../common'); <add>if (!common.hasCrypto) { common.skip('missing crypto'); } <add>const assert = require('assert'); <add>const http2 = require('http2'); <add> <add>testUpdateSettingsWith({ <add> server: http2.createSecureServer(), <add> newServerSettings: { <add> 'headerTableSize': 1, <add> 'initialWindowSize': 1, <add> 'maxConcurrentStreams': 1, <add> 'maxHeaderListSize': 1, <add> 'maxFrameSize': 16385, <add> 'enablePush': false, <add> 'enableConnectProtocol': true <add> } <add>}); <add>testUpdateSettingsWith({ <add> server: http2.createServer(), <add> newServerSettings: { <add> 'enablePush': false <add> } <add>}); <add> <add>function testUpdateSettingsWith({ server, newServerSettings }) { <add> const oldServerSettings = getServerSettings(server); <add> assert.notDeepStrictEqual(oldServerSettings, newServerSettings); <add> server.updateSettings(newServerSettings); <add> const updatedServerSettings = getServerSettings(server); <add> assert.deepStrictEqual(updatedServerSettings, { ...oldServerSettings, <add> ...newServerSettings }); <add> assert.throws(() => server.updateSettings(''), { <add> message: 'The "settings" argument must be of type object. ' + <add> 'Received type string (\'\')', <add> code: 'ERR_INVALID_ARG_TYPE', <add> name: 'TypeError' <add> }); <add> assert.throws(() => server.updateSettings({ <add> 'maxHeaderListSize': 'foo' <add> }), { <add> message: 'Invalid value for setting "maxHeaderListSize": foo', <add> code: 'ERR_HTTP2_INVALID_SETTING_VALUE', <add> name: 'RangeError' <add> }); <add>} <add> <add>function getServerSettings(server) { <add> const options = Object <add> .getOwnPropertySymbols(server) <add> .find((s) => s.toString() === 'Symbol(options)'); <add> return server[options].settings; <add>}
3
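A minimal usage sketch for the `updateSettings()` method added in the patch above, following the documented behaviour; the particular setting values are illustrative.

```js
'use strict';
const http2 = require('http2');

const server = http2.createServer();

// A valid settings object is merged into the server's stored settings.
server.updateSettings({ enablePush: false, maxConcurrentStreams: 100 });

// Invalid values throw, as exercised by the added test.
try {
  server.updateSettings({ maxHeaderListSize: 'foo' });
} catch (err) {
  console.error(err.code); // ERR_HTTP2_INVALID_SETTING_VALUE
}
```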
Mixed
Javascript
expose stream api in cursorto()
462f43824f6af577bde27da76d9f33365eddcfe7
<ide><path>doc/api/readline.md <ide> function completer(linePartial, callback) { <ide> } <ide> ``` <ide> <del>## readline.cursorTo(stream, x, y) <add>## readline.cursorTo(stream, x, y[, callback]) <ide> <!-- YAML <ide> added: v0.7.7 <add>changes: <add> - version: REPLACEME <add> pr-url: https://github.com/nodejs/node/pull/28674 <add> description: The stream's write() callback and return value are exposed. <ide> --> <ide> <ide> * `stream` {stream.Writable} <ide> * `x` {number} <ide> * `y` {number} <add>* `callback` {Function} Invoked once the operation completes. <add>* Returns: {boolean} `false` if `stream` wishes for the calling code to wait for <add> the `'drain'` event to be emitted before continuing to write additional data; <add> otherwise `true`. <ide> <ide> The `readline.cursorTo()` method moves cursor to the specified position in a <ide> given [TTY][] `stream`. <ide><path>lib/readline.js <ide> function emitKeypressEvents(stream, iface) { <ide> * moves the cursor to the x and y coordinate on the given stream <ide> */ <ide> <del>function cursorTo(stream, x, y) { <del> if (stream === null || stream === undefined) <del> return; <add>function cursorTo(stream, x, y, callback) { <add> if (callback !== undefined && typeof callback !== 'function') <add> throw new ERR_INVALID_CALLBACK(callback); <ide> <del> if (typeof x !== 'number' && typeof y !== 'number') <del> return; <add> if (stream == null || (typeof x !== 'number' && typeof y !== 'number')) { <add> if (typeof callback === 'function') <add> process.nextTick(callback); <add> return true; <add> } <ide> <ide> if (typeof x !== 'number') <ide> throw new ERR_INVALID_CURSOR_POS(); <ide> <del> if (typeof y !== 'number') { <del> stream.write(CSI`${x + 1}G`); <del> } else { <del> stream.write(CSI`${y + 1};${x + 1}H`); <del> } <add> const data = typeof y !== 'number' ? CSI`${x + 1}G` : CSI`${y + 1};${x + 1}H`; <add> return stream.write(data, callback); <ide> } <ide> <ide> /** <ide><path>test/parallel/test-readline-csi.js <ide> assert.strictEqual(readline.moveCursor(undefined, 1, 1, common.mustCall()), <ide> true); <ide> <ide> // Undefined or null as stream should not throw. 
<del>readline.cursorTo(null); <del>readline.cursorTo(); <add>assert.strictEqual(readline.cursorTo(null), true); <add>assert.strictEqual(readline.cursorTo(), true); <add>assert.strictEqual(readline.cursorTo(null, 1, 1, common.mustCall()), true); <add>assert.strictEqual(readline.cursorTo(undefined, 1, 1, common.mustCall()), true); <ide> <ide> writable.data = ''; <del>readline.cursorTo(writable, 'a'); <add>assert.strictEqual(readline.cursorTo(writable, 'a'), true); <ide> assert.strictEqual(writable.data, ''); <ide> <ide> writable.data = ''; <del>readline.cursorTo(writable, 'a', 'b'); <add>assert.strictEqual(readline.cursorTo(writable, 'a', 'b'), true); <ide> assert.strictEqual(writable.data, ''); <ide> <ide> writable.data = ''; <ide> common.expectsError( <ide> assert.strictEqual(writable.data, ''); <ide> <ide> writable.data = ''; <del>readline.cursorTo(writable, 1, 'a'); <add>assert.strictEqual(readline.cursorTo(writable, 1, 'a'), true); <ide> assert.strictEqual(writable.data, '\x1b[2G'); <ide> <ide> writable.data = ''; <del>readline.cursorTo(writable, 1, 2); <add>assert.strictEqual(readline.cursorTo(writable, 1, 2), true); <ide> assert.strictEqual(writable.data, '\x1b[3;2H'); <add> <add>writable.data = ''; <add>assert.strictEqual(readline.cursorTo(writable, 1, 2, common.mustCall()), true); <add>assert.strictEqual(writable.data, '\x1b[3;2H'); <add> <add>// Verify that cursorTo() throws on invalid callback. <add>assert.throws(() => { <add> readline.cursorTo(writable, 1, 1, null); <add>}, /ERR_INVALID_CALLBACK/);
3
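The patch above makes `readline.cursorTo()` return the underlying `write()` result and accept a completion callback. A small sketch of how a caller might use both, on a Node build that includes this change; the coordinates are arbitrary.

```js
'use strict';
const readline = require('readline');

// Move the cursor to the top-left corner, then write once the escape
// sequence has been flushed to the stream.
const flushed = readline.cursorTo(process.stdout, 0, 0, () => {
  process.stdout.write('cursor moved\n');
});

// A false return value asks the caller to wait for 'drain' before writing more.
if (!flushed) {
  process.stdout.once('drain', () => console.log('stdout drained'));
}
```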
Python
Python
update pool module import for theano
379966050454151db016f427dd7dae24a35e45e0
<ide><path>keras/backend/theano_backend.py <ide> import theano <ide> from theano import tensor as T <ide> from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams <del>from theano.tensor.signal import downsample <add>from theano.tensor.signal import pool <ide> from theano.tensor.nnet import conv3d2d <ide> import numpy as np <ide> from .common import _FLOATX, _EPSILON <ide> def pool2d(x, pool_size, strides=(1, 1), border_mode='valid', <ide> x = x.dimshuffle((0, 3, 1, 2)) <ide> <ide> if pool_mode == 'max': <del> pool_out = downsample.max_pool_2d(x, ds=pool_size, st=strides, <del> ignore_border=True, <del> padding=padding, <del> mode='max') <add> pool_out = pool.max_pool_2d(x, ds=pool_size, st=strides, <add> ignore_border=True, <add> padding=padding, <add> mode='max') <ide> elif pool_mode == 'avg': <del> pool_out = downsample.max_pool_2d(x, ds=pool_size, st=strides, <del> ignore_border=True, <del> padding=padding, <del> mode='average_exc_pad') <add> pool_out = pool.max_pool_2d(x, ds=pool_size, st=strides, <add> ignore_border=True, <add> padding=padding, <add> mode='average_exc_pad') <ide> else: <ide> raise Exception('Invalid pooling mode: ' + str(pool_mode)) <ide> <ide> def pool3d(x, pool_size, strides=(1, 1, 1), border_mode='valid', <ide> <ide> if pool_mode == 'max': <ide> # pooling over conv_dim2, conv_dim1 (last two channels) <del> output = downsample.max_pool_2d(input=x.dimshuffle(0, 1, 4, 3, 2), <del> ds=(pool_size[1], pool_size[0]), <del> st=(strides[1], strides[0]), <del> ignore_border=ignore_border, <del> padding=padding, <del> mode='max') <add> output = pool.max_pool_2d(input=x.dimshuffle(0, 1, 4, 3, 2), <add> ds=(pool_size[1], pool_size[0]), <add> st=(strides[1], strides[0]), <add> ignore_border=ignore_border, <add> padding=padding, <add> mode='max') <ide> <ide> # pooling over conv_dim3 <del> pool_out = downsample.max_pool_2d(input=output.dimshuffle(0, 1, 4, 3, 2), <del> ds=(1, pool_size[2]), <del> st=(1, strides[2]), <del> ignore_border=ignore_border, <del> padding=padding, <del> mode='max') <add> pool_out = pool.max_pool_2d(input=output.dimshuffle(0, 1, 4, 3, 2), <add> ds=(1, pool_size[2]), <add> st=(1, strides[2]), <add> ignore_border=ignore_border, <add> padding=padding, <add> mode='max') <ide> <ide> elif pool_mode == 'avg': <ide> # pooling over conv_dim2, conv_dim1 (last two channels) <del> output = downsample.max_pool_2d(input=x.dimshuffle(0, 1, 4, 3, 2), <del> ds=(pool_size[1], pool_size[0]), <del> st=(strides[1], strides[0]), <del> ignore_border=ignore_border, <del> padding=padding, <del> mode='average_exc_pad') <add> output = pool.max_pool_2d(input=x.dimshuffle(0, 1, 4, 3, 2), <add> ds=(pool_size[1], pool_size[0]), <add> st=(strides[1], strides[0]), <add> ignore_border=ignore_border, <add> padding=padding, <add> mode='average_exc_pad') <ide> <ide> # pooling over conv_dim3 <del> pool_out = downsample.max_pool_2d(input=output.dimshuffle(0, 1, 4, 3, 2), <del> ds=(1, pool_size[2]), <del> st=(1, strides[2]), <del> ignore_border=ignore_border, <del> padding=padding, <del> mode='average_exc_pad') <add> pool_out = pool.max_pool_2d(input=output.dimshuffle(0, 1, 4, 3, 2), <add> ds=(1, pool_size[2]), <add> st=(1, strides[2]), <add> ignore_border=ignore_border, <add> padding=padding, <add> mode='average_exc_pad') <ide> else: <ide> raise Exception('Invalid pooling mode: ' + str(pool_mode)) <ide>
1
Text
Text
remove unneeded dependencies from readme
fc72a809c1c60569fb1d70a95f5296feaa5220f8
<ide><path>README.md <ide> Installing on Ubuntu 12.04 and 12.10 <ide> 1. Install dependencies: <ide> <ide> ```bash <del> sudo apt-get install lxc wget bsdtar curl <add> sudo apt-get install lxc bsdtar <ide> sudo apt-get install linux-image-extra-`uname -r` <ide> ``` <ide>
1
PHP
PHP
fix cs errors in skel/
8f5acb09328395c71744eb1f9a1b49bb8488482e
<ide><path>app/Config/Schema/db_acl.php <ide> * @license http://www.opensource.org/licenses/mit-license.php MIT License <ide> */ <ide> <del>/* <add>/** <ide> * <ide> * Using the Schema command line utility <ide> * cake schema run create DbAcl <ide> * <ide> */ <ide> class DbAclSchema extends CakeSchema { <ide> <add>/** <add> * Before event. <add> * <add> * @param array $event The event data. <add> * @return boolean success <add> */ <ide> public function before($event = array()) { <ide> return true; <ide> } <ide> <add>/** <add> * After event. <add> * <add> * @param array $event The event data. <add> * @return void <add> */ <ide> public function after($event = array()) { <ide> } <ide> <ide><path>lib/Cake/Console/Templates/skel/Config/Schema/db_acl.php <ide> */ <ide> class DbAclSchema extends CakeSchema { <ide> <add>/** <add> * Before event. <add> * <add> * @param array $event The event data. <add> * @return boolean success <add> */ <ide> public function before($event = array()) { <ide> return true; <ide> } <ide> <add>/** <add> * After event. <add> * <add> * @param array $event The event data. <add> * @return void <add> */ <ide> public function after($event = array()) { <ide> } <ide> <add>/** <add> * ACO - Access Control Object - Something that is wanted <add> */ <ide> public $acos = array( <ide> 'id' => array('type' => 'integer', 'null' => false, 'default' => null, 'length' => 10, 'key' => 'primary'), <ide> 'parent_id' => array('type' => 'integer', 'null' => true, 'default' => null, 'length' => 10), <ide> public function after($event = array()) { <ide> 'indexes' => array('PRIMARY' => array('column' => 'id', 'unique' => 1)) <ide> ); <ide> <add>/** <add> * ARO - Access Request Object - Something that wants something <add> */ <ide> public $aros = array( <ide> 'id' => array('type' => 'integer', 'null' => false, 'default' => null, 'length' => 10, 'key' => 'primary'), <ide> 'parent_id' => array('type' => 'integer', 'null' => true, 'default' => null, 'length' => 10), <ide> public function after($event = array()) { <ide> 'indexes' => array('PRIMARY' => array('column' => 'id', 'unique' => 1)) <ide> ); <ide> <add>/** <add> * Used by the Cake::Model:Permission class. <add> * Checks if the given $aro has access to action $action in $aco. <add> */ <ide> public $aros_acos = array( <ide> 'id' => array('type' => 'integer', 'null' => false, 'default' => null, 'length' => 10, 'key' => 'primary'), <ide> 'aro_id' => array('type' => 'integer', 'null' => false, 'length' => 10, 'key' => 'index'), <ide><path>lib/Cake/Console/Templates/skel/Config/Schema/i18n.php <ide> */ <ide> class I18nSchema extends CakeSchema { <ide> <add>/** <add> * The name property <add> * <add> * @var string <add> */ <ide> public $name = 'i18n'; <ide> <add>/** <add> * Before event. <add> * <add> * @param array $event The event data. <add> * @return boolean success <add> */ <ide> public function before($event = array()) { <ide> return true; <ide> } <ide> <add>/** <add> * After event. <add> * <add> * @param array $event The event data. 
<add> * @return void <add> */ <ide> public function after($event = array()) { <ide> } <ide> <add>/** <add> * The i18n table property <add> * <add> * @var array <add> */ <ide> public $i18n = array( <ide> 'id' => array('type' => 'integer', 'null' => false, 'default' => null, 'length' => 10, 'key' => 'primary'), <ide> 'locale' => array('type' => 'string', 'null' => false, 'length' => 6, 'key' => 'index'), <ide><path>lib/Cake/Console/Templates/skel/Config/Schema/sessions.php <ide> * @since CakePHP(tm) v 0.2.9 <ide> */ <ide> <del>/* <add>/** <ide> * <ide> * Using the Schema command line utility <ide> * cake schema run create Sessions <ide> * <ide> */ <ide> class SessionsSchema extends CakeSchema { <ide> <add>/** <add> * Name property <add> * <add> * @var string <add> */ <ide> public $name = 'Sessions'; <ide> <add>/** <add> * Before event. <add> * <add> * @param array $event The event data. <add> * @return bool Success <add> */ <ide> public function before($event = array()) { <ide> return true; <ide> } <ide> <add>/** <add> * After event. <add> * <add> * @param array $event The event data. <add> * @return void <add> */ <ide> public function after($event = array()) { <ide> } <ide> <add>/** <add> * cake_sessions table definition <add> * <add> * @var array <add> */ <ide> public $cake_sessions = array( <ide> 'id' => array('type' => 'string', 'null' => false, 'key' => 'primary'), <ide> 'data' => array('type' => 'text', 'null' => true, 'default' => null), <ide><path>lib/Cake/Console/Templates/skel/Controller/PagesController.php <ide> class PagesController extends AppController { <ide> /** <ide> * Displays a view <ide> * <del> * @param mixed What page to display <ide> * @return void <ide> * @throws NotFoundException When the view file could not be found <del> * or MissingViewException in debug mode. <add> * or MissingViewException in debug mode. <ide> */ <ide> public function display() { <ide> $path = func_get_args(); <ide><path>lib/Cake/Console/Templates/skel/Test/Case/AllTestsTest.php <ide> <ide> class AllTestsTest extends CakeTestSuite { <ide> <add>/** <add> * Get the suite object. <add> * <add> * @return CakeTestSuite Suite class instance. <add> */ <ide> public static function suite() { <ide> $suite = new CakeTestSuite('All application tests'); <ide> $suite->addTestDirectoryRecursive(TESTS . 'Case');
6
Go
Go
remove unnecessary conversions
33d8492ce413780e4e339631386541410677ecf3
<ide><path>pkg/system/syscall_windows.go <ide> func IsWindowsClient() bool { <ide> <ide> // Unmount is a platform-specific helper function to call <ide> // the unmount syscall. Not supported on Windows <del>func Unmount(dest string) error { <add>func Unmount(_ string) error { <ide> return nil <ide> } <ide> <ide> func CommandLineToArgv(commandLine string) ([]string, error) { <ide> <ide> newArgs := make([]string, argc) <ide> for i, v := range (*argv)[:argc] { <del> newArgs[i] = string(windows.UTF16ToString((*v)[:])) <add> newArgs[i] = windows.UTF16ToString((*v)[:]) <ide> } <ide> <ide> return newArgs, nil <ide> func GetSecurityDescriptorDacl(securityDescriptor *byte, daclPresent *uint32, da <ide> r1, _, e1 := syscall.Syscall6(procGetSecurityDescriptorDacl.Addr(), 4, uintptr(unsafe.Pointer(securityDescriptor)), uintptr(unsafe.Pointer(daclPresent)), uintptr(unsafe.Pointer(dacl)), uintptr(unsafe.Pointer(daclDefaulted)), 0, 0) <ide> if r1 == 0 { <ide> if e1 != 0 { <del> result = syscall.Errno(e1) <add> result = e1 <ide> } else { <ide> result = syscall.EINVAL <ide> }
1
Javascript
Javascript
increase querystring coverage
dc7d9eb0a94c6ca97c5ee0a06a6c4ae84b5b8575
<ide><path>test/parallel/test-querystring-escape.js <ide> assert.deepStrictEqual(qs.escape('test'), 'test'); <ide> assert.deepStrictEqual(qs.escape({}), '%5Bobject%20Object%5D'); <ide> assert.deepStrictEqual(qs.escape([5, 10]), '5%2C10'); <ide> assert.deepStrictEqual(qs.escape('Ŋōđĕ'), '%C5%8A%C5%8D%C4%91%C4%95'); <add>assert.deepStrictEqual(qs.escape('testŊōđĕ'), 'test%C5%8A%C5%8D%C4%91%C4%95'); <add>assert.deepStrictEqual(qs.escape(`${String.fromCharCode(0xD800 + 1)}test`), <add> '%F0%90%91%B4est'); <add>assert.throws(() => qs.escape(String.fromCharCode(0xD800 + 1)), <add> /^URIError: URI malformed$/); <ide> <ide> // using toString for objects <ide> assert.strictEqual( <ide> assert.strictEqual( <ide> ); <ide> <ide> // toString is not callable, must throw an error <del>assert.throws(() => qs.escape({toString: 5})); <add>assert.throws(() => qs.escape({toString: 5}), <add> /^TypeError: Cannot convert object to primitive value$/); <ide> <ide> // should use valueOf instead of non-callable toString <ide> assert.strictEqual(qs.escape({toString: 5, valueOf: () => 'test'}), 'test'); <ide> <del>assert.throws(() => qs.escape(Symbol('test'))); <add>assert.throws(() => qs.escape(Symbol('test')), <add> /^TypeError: Cannot convert a Symbol value to a string$/); <ide><path>test/parallel/test-querystring.js <ide> function demoDecode(str) { <ide> } <ide> check(qs.parse('a=a&b=b&c=c', null, null, { decodeURIComponent: demoDecode }), <ide> { aa: 'aa', bb: 'bb', cc: 'cc' }); <add>check(qs.parse('a=a&b=b&c=c', null, '==', { decodeURIComponent: (str) => str }), <add> { 'a=a': '', 'b=b': '', 'c=c': '' }); <ide> <ide> // Test QueryString.unescape <ide> function errDecode(str) {
2
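The added tests above exercise `querystring.escape()` with non-string input and `querystring.parse()` with a custom `decodeURIComponent`. A short sketch of the same public API; the input strings are illustrative.

```js
'use strict';
const qs = require('querystring');

// escape() stringifies non-string input before percent-encoding it.
console.log(qs.escape('Ŋōđĕ'));                               // %C5%8A%C5%8D%C4%91%C4%95
console.log(qs.escape({ toString: 5, valueOf: () => 'ok' })); // ok (valueOf is used)

// parse() accepts a custom decodeURIComponent, as in the added test.
const identity = (str) => str;
const parsed = qs.parse('a=%61&b=b', null, null, { decodeURIComponent: identity });
console.log(parsed.a); // '%61', left undecoded by the identity decoder
```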
PHP
PHP
fix comment mistakes
95989bd97efa7544ae596de1d2ca95ad74e6303e
<ide><path>Cake/Database/Connection.php <ide> public function supportsQuoting() { <ide> <ide> /** <ide> * Quotes a database identifier (a column name, table name, etc..) to <del> * be used safely in queries without the risk of using reserver words <add> * be used safely in queries without the risk of using reserved words <ide> * <ide> * @param string $identifier <ide> * @return string <ide><path>Cake/Database/Dialect/MysqlDialectTrait.php <ide> trait MysqlDialectTrait { <ide> /** <ide> * Get the schema dialect. <ide> * <del> * Used by Cake\Schema package to reflect schema and <add> * Used by Cake\Database\Schema package to reflect schema and <ide> * generate schema. <ide> * <ide> * @return Cake\Database\Schema\MysqlSchema <ide><path>Cake/Database/Dialect/PostgresDialectTrait.php <ide> protected function _transformFunctionExpression(FunctionExpression $expression) <ide> /** <ide> * Get the schema dialect. <ide> * <del> * Used by Cake\Schema package to reflect schema and <add> * Used by Cake\Database\Schema package to reflect schema and <ide> * generate schema. <ide> * <ide> * @return Cake\Database\Schema\PostgresSchema <ide><path>Cake/Database/Dialect/SqliteDialectTrait.php <ide> protected function _insertQueryTranslator($query) { <ide> /** <ide> * Get the schema dialect. <ide> * <del> * Used by Cake\Schema package to reflect schema and <add> * Used by Cake\Database\Schema package to reflect schema and <ide> * generate schema. <ide> * <ide> * @return Cake\Database\Schema\SqliteSchema <ide><path>Cake/Database/Driver.php <ide> public abstract function queryTranslator($type); <ide> /** <ide> * Get the schema dialect. <ide> * <del> * Used by Cake\Schema package to reflect schema and <add> * Used by Cake\Database\Schema package to reflect schema and <ide> * generate schema. <ide> * <ide> * If all the tables that use this Driver specify their <ide> public abstract function schemaDialect(); <ide> <ide> /** <ide> * Quotes a database identifier (a column name, table name, etc..) to <del> * be used safely in queries without the risk of using reserver words <add> * be used safely in queries without the risk of using reserved words <ide> * <ide> * @param string $identifier <ide> * @return string <ide><path>Cake/Database/SqlDialectTrait.php <ide> trait SqlDialectTrait { <ide> <ide> /** <ide> * Quotes a database identifier (a column name, table name, etc..) to <del> * be used safely in queries without the risk of using reserver words <add> * be used safely in queries without the risk of using reserved words <ide> * <ide> * @param string $identifier <ide> * @return string
6
Ruby
Ruby
remove unused argument
45f8848ca7d7a1065c4c9f3d73946d908a382a9d
<ide><path>actionview/lib/action_view/template/text.rb <ide> class Template <ide> class Text #:nodoc: <ide> attr_accessor :type <ide> <del> def initialize(string, type = nil) <add> def initialize(string) <ide> @string = string.to_s <del> @type = Types[type] || type if type <del> @type ||= Types[:text] <add> @type = Types[:text] <ide> end <ide> <ide> def identifier <ide> def render(*args) <ide> end <ide> <ide> def formats <del> [@type.respond_to?(:ref) ? @type.ref : @type.to_s] <add> [@type.ref] <ide> end <ide> end <ide> end <ide><path>actionview/test/template/text_test.rb <ide> require "abstract_unit" <ide> <ide> class TextTest < ActiveSupport::TestCase <del> test "formats returns symbol for recognized MIME type" do <del> assert_equal [:text], ActionView::Template::Text.new("", :text).formats <del> end <del> <del> test "formats returns string for recognized MIME type when MIME does not have symbol" do <del> foo = Mime::Type.lookup("foo") <del> assert_nil foo.to_sym <del> assert_equal ["foo"], ActionView::Template::Text.new("", foo).formats <del> end <del> <del> test "formats returns string for unknown MIME type" do <del> assert_equal ["foo"], ActionView::Template::Text.new("", "foo").formats <add> test "formats always return :text" do <add> assert_equal [:text], ActionView::Template::Text.new("").formats <ide> end <ide> end
2
Java
Java
remove unused field
883f4651bc5cc744cd10bc646e41a981c71c4b1a
<ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/mounting/SurfaceMountingManager.java <ide> public class SurfaceMountingManager { <ide> <ide> // This is null *until* StopSurface is called. <ide> private Set<Integer> mTagSetForStoppedSurface; <del> private long mLastSuccessfulQueryTime = -1; <ide> <ide> private final int mSurfaceId; <ide> <ide> public boolean getViewExists(int tag) { <ide> // deleted. This helps distinguish between scenarios where an invalid tag is referenced, vs <ide> // race conditions where an imperative method is called on a tag during/just after StopSurface. <ide> if (mTagSetForStoppedSurface != null && mTagSetForStoppedSurface.contains(tag)) { <del> mLastSuccessfulQueryTime = System.currentTimeMillis(); <ide> return true; <ide> } <ide> if (mTagToViewState == null) { <ide> public void run() { <ide> } <ide> <ide> // Evict all views from cache and memory <del> mLastSuccessfulQueryTime = System.currentTimeMillis(); <ide> mTagSetForStoppedSurface = mTagToViewState.keySet(); <ide> mTagToViewState = null; <ide> mJSResponderHandler = null;
1
Python
Python
fix compatibility with old maxpooling interface
ca4fc2e72f4a841e9230d9e4241893f0381f3fdf
<ide><path>keras/layers/convolutional.py <ide> def get_config(self): <ide> class MaxPooling1D(Layer): <ide> def __init__(self, pool_length=2, stride=1, ignore_border=True): <ide> super(MaxPooling1D, self).__init__() <del> if type(stride) is not int or not stride: <add> if stride is None: <add> stride = pool_length <add> if type(stride) is not int or not stride: <ide> raise Exception('"stride" argument in MaxPooling1D should be an int > 0.') <ide> self.pool_length = pool_length <ide> self.stride = stride <ide> def get_config(self): <ide> <ide> <ide> class MaxPooling2D(Layer): <del> def __init__(self, poolsize=(2, 2), stride=(1, 1), ignore_border=True): <add> def __init__(self, poolsize=(2, 2), stride=(2, 2), ignore_border=True): <ide> super(MaxPooling2D, self).__init__() <ide> self.input = T.tensor4() <ide> self.poolsize = tuple(poolsize) <add> if stride is None: <add> stride = self.poolsize <ide> self.stride = tuple(stride) <ide> self.ignore_border = ignore_border <ide>
1
Javascript
Javascript
remove sharedcreds in server constructor
161182ec05191b0e1f6e58ced1c8b567393aceda
<ide><path>lib/_tls_wrap.js <ide> function Server(options, listener) { <ide> // Handle option defaults: <ide> this.setOptions(options); <ide> <del> var sharedCreds = tls.createSecureContext({ <add> this._sharedCreds = tls.createSecureContext({ <ide> pfx: this.pfx, <ide> key: this.key, <ide> passphrase: this.passphrase, <ide> function Server(options, listener) { <ide> crl: this.crl, <ide> sessionIdContext: this.sessionIdContext <ide> }); <del> this._sharedCreds = sharedCreds; <ide> <ide> this[kHandshakeTimeout] = options.handshakeTimeout || (120 * 1000); <ide> this[kSNICallback] = options.SNICallback; <ide> function Server(options, listener) { <ide> } <ide> <ide> if (this.sessionTimeout) { <del> sharedCreds.context.setSessionTimeout(this.sessionTimeout); <add> this._sharedCreds.context.setSessionTimeout(this.sessionTimeout); <ide> } <ide> <ide> if (this.ticketKeys) { <del> sharedCreds.context.setTicketKeys(this.ticketKeys); <add> this._sharedCreds.context.setTicketKeys(this.ticketKeys); <ide> } <ide> <ide> // constructor call
1
Javascript
Javascript
add tests for weird moment clones
d08df925a5e5c132b366957567c484724a29cd12
<ide><path>test/moment/create.js <ide> exports.create = { <ide> test.done(); <ide> }, <ide> <add> "cloning moment works with weird clones" : function (test) { <add> var extend = function(a, b) { <add> var i; <add> for (i in b) { <add> a[i] = b[i]; <add> } <add> return a; <add> }, <add> now = moment(); <add> <add> test.expect(1); <add> test.equal(+extend({}, now).clone(), +now, "cloning extend-ed now is now"); <add> test.done(); <add> }, <add> <ide> "undefined" : function (test) { <ide> test.expect(1); <ide> test.ok(moment().toDate() instanceof Date, "undefined"); <ide><path>test/moment/is_moment.js <ide> var moment = require('../../moment'); <ide> <ide> exports.is_moment = { <ide> "is moment object": function (test) { <del> test.expect(11); <add> test.expect(12); <ide> <del> var MyObj = function () {}; <add> var MyObj = function () {}, <add> extend = function(a, b) { <add> var i; <add> for (i in b) { <add> a[i] = b[i]; <add> } <add> return a; <add> }; <ide> MyObj.prototype.toDate = function () { <ide> return new Date(); <ide> }; <ide> <ide> test.ok(moment.isMoment(moment()), 'simple moment object'); <ide> test.ok(moment.isMoment(moment('invalid date')), 'invalid moment object'); <add> test.ok(moment.isMoment(extend({}, moment())), 'externally cloned moments are moments'); <ide> <ide> test.ok(!moment.isMoment(new MyObj()), 'myObj is not moment object'); <ide> test.ok(!moment.isMoment(moment), 'moment function is not moment object');
2
PHP
PHP
pass primary to insertgetid
340810a187804801f078e054172750e7d998e7f4
<ide><path>src/Illuminate/Database/Eloquent/Model.php <ide> public function save() <ide> { <ide> if ($this->incrementing) <ide> { <del> $this->$keyName = $query->insertGetId($this->attributes); <add> $this->$keyName = $query->insertGetId($this->attributes, $keyName); <ide> } <ide> else <ide> { <ide><path>tests/Database/DatabaseEloquentModelTest.php <ide> public function testInsertProcess() <ide> { <ide> $model = $this->getMock('EloquentModelStub', array('newQuery', 'updateTimestamps')); <ide> $query = m::mock('Illuminate\Database\Eloquent\Builder'); <del> $query->shouldReceive('insertGetId')->once()->with(array('name' => 'taylor'))->andReturn(1); <add> $query->shouldReceive('insertGetId')->once()->with(array('name' => 'taylor'), 'id')->andReturn(1); <ide> $model->expects($this->once())->method('newQuery')->will($this->returnValue($query)); <ide> $model->expects($this->once())->method('updateTimestamps'); <ide>
2
Javascript
Javascript
ignore link routes in lang redirects
1981e5b1a97150db2a0b5ad5ce0911657f81a2e7
<ide><path>server/utils/lang-passthrough-urls.js <ide> export default [ <ide> 'auth', <del> 'services' <add> 'services', <add> 'link' <ide> ].reduce((throughs, route) => { <del> throughs[route] = true; return throughs; <add> throughs[route] = true; <add> return throughs; <ide> }, {});
1
Go
Go
move systemd code into pkg
ec43ec50b44cff3f043c78cad97466c68e2ba8cd
<ide><path>pkg/cgroups/apply_nosystemd.go <del>// +build !linux <del> <del>package cgroups <del> <del>import ( <del> "fmt" <del>) <del> <del>func useSystemd() bool { <del> return false <del>} <del> <del>func systemdApply(c *Cgroup, pid int) (ActiveCgroup, error) { <del> return nil, fmt.Errorf("Systemd not supported") <del>} <ide><path>pkg/cgroups/cgroups.go <ide> type Cgroup struct { <ide> type ActiveCgroup interface { <ide> Cleanup() error <ide> } <del> <del>func Apply(c *Cgroup, pid int) (ActiveCgroup, error) { <del> // We have two implementation of cgroups support, one is based on <del> // systemd and the dbus api, and one is based on raw cgroup fs operations <del> // following the pre-single-writer model docs at: <del> // http://www.freedesktop.org/wiki/Software/systemd/PaxControlGroups/ <del> <del> if useSystemd() { <del> return systemdApply(c, pid) <del> } else { <del> return rawApply(c, pid) <del> } <del>} <ide><path>pkg/cgroups/systemd/apply_nosystemd.go <add>// +build !linux <add> <add>package systemd <add> <add>import ( <add> "fmt" <add> "github.com/dotcloud/docker/pkg/cgroups" <add>) <add> <add>func UseSystemd() bool { <add> return false <add>} <add> <add>func systemdApply(c *Cgroup, pid int) (cgroups.ActiveCgroup, error) { <add> return nil, fmt.Errorf("Systemd not supported") <add>} <add><path>pkg/cgroups/systemd/apply_systemd.go <del><path>pkg/cgroups/apply_systemd.go <ide> // +build linux <ide> <del>package cgroups <add>package systemd <ide> <ide> import ( <ide> "fmt" <del> systemd1 "github.com/coreos/go-systemd/dbus" <del> "github.com/dotcloud/docker/pkg/systemd" <del> "github.com/godbus/dbus" <add> "io/ioutil" <ide> "path/filepath" <ide> "strings" <ide> "sync" <add> <add> systemd1 "github.com/coreos/go-systemd/dbus" <add> "github.com/dotcloud/docker/pkg/cgroups" <add> "github.com/dotcloud/docker/pkg/systemd" <add> "github.com/godbus/dbus" <ide> ) <ide> <ide> type systemdCgroup struct { <ide> } <ide> <add>type DeviceAllow struct { <add> Node string <add> Permissions string <add>} <add> <ide> var ( <ide> connLock sync.Mutex <ide> theConn *systemd1.Conn <ide> hasStartTransientUnit bool <ide> ) <ide> <del>func useSystemd() bool { <add>func UseSystemd() bool { <ide> if !systemd.SdBooted() { <ide> return false <ide> } <ide> func useSystemd() bool { <ide> } <ide> } <ide> } <del> <ide> return hasStartTransientUnit <ide> } <ide> <del>type DeviceAllow struct { <del> Node string <del> Permissions string <del>} <del> <ide> func getIfaceForUnit(unitName string) string { <ide> if strings.HasSuffix(unitName, ".scope") { <ide> return "Scope" <ide> func getIfaceForUnit(unitName string) string { <ide> return "Unit" <ide> } <ide> <del>func systemdApply(c *Cgroup, pid int) (ActiveCgroup, error) { <del> unitName := c.Parent + "-" + c.Name + ".scope" <del> slice := "system.slice" <del> <del> var properties []systemd1.Property <add>func Apply(c *cgroups.Cgroup, pid int) (cgroups.ActiveCgroup, error) { <add> var ( <add> unitName = c.Parent + "-" + c.Name + ".scope" <add> slice = "system.slice" <add> properties []systemd1.Property <add> ) <ide> <ide> for _, v := range c.UnitProperties { <ide> switch v[0] { <ide> func systemdApply(c *Cgroup, pid int) (ActiveCgroup, error) { <ide> properties = append(properties, <ide> systemd1.Property{"Slice", dbus.MakeVariant(slice)}, <ide> systemd1.Property{"Description", dbus.MakeVariant("docker container " + c.Name)}, <del> systemd1.Property{"PIDs", dbus.MakeVariant([]uint32{uint32(pid)})}) <add> systemd1.Property{"PIDs", 
dbus.MakeVariant([]uint32{uint32(pid)})}, <add> ) <ide> <ide> if !c.DeviceAccess { <ide> properties = append(properties, <ide> func systemdApply(c *Cgroup, pid int) (ActiveCgroup, error) { <ide> cgroup := props["ControlGroup"].(string) <ide> <ide> if !c.DeviceAccess { <del> mountpoint, err := FindCgroupMountpoint("devices") <add> mountpoint, err := cgroups.FindCgroupMountpoint("devices") <ide> if err != nil { <ide> return nil, err <ide> } <ide> <ide> path := filepath.Join(mountpoint, cgroup) <ide> <ide> // /dev/pts/* <del> if err := writeFile(path, "devices.allow", "c 136:* rwm"); err != nil { <add> if err := ioutil.WriteFile(filepath.Join(path, "devices.allow"), []byte("c 136:* rwm"), 0700); err != nil { <ide> return nil, err <ide> } <ide> // tuntap <del> if err := writeFile(path, "devices.allow", "c 10:200 rwm"); err != nil { <add> if err := ioutil.WriteFile(filepath.Join(path, "devices.allow"), []byte("c 10:200 rwm"), 0700); err != nil { <ide> return nil, err <ide> } <ide> } <del> <ide> return &systemdCgroup{}, nil <ide> } <ide> <ide><path>pkg/libcontainer/nsinit/exec.go <ide> import ( <ide> "syscall" <ide> <ide> "github.com/dotcloud/docker/pkg/cgroups" <add> "github.com/dotcloud/docker/pkg/cgroups/systemd" <ide> "github.com/dotcloud/docker/pkg/libcontainer" <ide> "github.com/dotcloud/docker/pkg/libcontainer/network" <ide> "github.com/dotcloud/docker/pkg/system" <ide> func (ns *linuxNs) Exec(container *libcontainer.Container, term Terminal, args [ <ide> <ide> func (ns *linuxNs) SetupCgroups(container *libcontainer.Container, nspid int) (cgroups.ActiveCgroup, error) { <ide> if container.Cgroups != nil { <del> return cgroups.Apply(container.Cgroups, nspid) <add> c := container.Cgroups <add> if systemd.UseSystemd() { <add> return systemd.Apply(c, nspid) <add> } <add> return rawApply(c, nspid) <ide> } <ide> return nil, nil <ide> }
5
PHP
PHP
add constraint class for header not contains
c160876124eb6a1633f803cecfe7fd31e02403ad
<ide><path>src/TestSuite/Constraint/Response/HeaderNotContains.php <add><?php <add>/** <add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <add> * @since 3.7.0 <add> * @license http://www.opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\TestSuite\Constraint\Response; <add> <add>/** <add> * Constraint for ensuring a header does not contain a value. <add> * <add> * @internal <add> */ <add>class HeaderNotContains extends HeaderEquals <add>{ <add> <add> /** <add> * Checks assertion <add> * <add> * @param mixed $other Expected content <add> * @return bool <add> */ <add> public function matches($other) <add> { <add> return parent::matches($other) === false; <add> } <add> <add> /** <add> * Assertion message <add> * <add> * @return string <add> */ <add> public function toString() <add> { <add> return sprintf("is not in header '%s'", $this->headerName); <add> } <add>}
1
Javascript
Javascript
fix merge issue
0b69db201f88be725b6751999c09bce56951f012
<ide><path>test/hotCases/child-compiler/issue-9706/report-child-assets-loader.js <ide> module.exports = function(source) { <ide> compilerCache.set(this._compiler, childCompiler); <ide> } <ide> const callback = this.async(); <add> childCompiler.parentCompilation = this._compilation; <ide> childCompiler.runAsChild((err, entries, compilation) => { <ide> if (err) return callback(err); <ide> <ide> const result = `export const assets = ${JSON.stringify( <ide> compilation.getAssets().map(a => a.name) <ide> )};\n${source}`; <add> <ide> callback(null, result); <ide> }); <ide> };
1
Javascript
Javascript
pass the original listener added by once
706778a90247937af0af0ae668fabf03e142640b
<ide><path>lib/events.js <ide> EventEmitter.prototype.prependOnceListener = <ide> // emits a 'removeListener' event iff the listener was removed <ide> EventEmitter.prototype.removeListener = <ide> function removeListener(type, listener) { <del> var list, events, position, i; <add> var list, events, position, i, originalListener; <ide> <ide> if (typeof listener !== 'function') <ide> throw new TypeError('"listener" argument must be a function'); <ide> EventEmitter.prototype.removeListener = <ide> else { <ide> delete events[type]; <ide> if (events.removeListener) <del> this.emit('removeListener', type, listener); <add> this.emit('removeListener', type, list.listener || listener); <ide> } <ide> } else if (typeof list !== 'function') { <ide> position = -1; <ide> <ide> for (i = list.length; i-- > 0;) { <ide> if (list[i] === listener || <ide> (list[i].listener && list[i].listener === listener)) { <add> originalListener = list[i].listener; <ide> position = i; <ide> break; <ide> } <ide> EventEmitter.prototype.removeListener = <ide> } <ide> <ide> if (events.removeListener) <del> this.emit('removeListener', type, listener); <add> this.emit('removeListener', type, originalListener || listener); <ide> } <ide> <ide> return this; <ide><path>test/parallel/test-event-emitter-remove-listeners.js <ide> e6.emit('hello'); <ide> <ide> // Interal listener array [listener3] <ide> e6.emit('hello'); <add> <add>const e7 = new events.EventEmitter(); <add> <add>const listener5 = () => {}; <add> <add>e7.once('hello', listener5); <add>e7.on('removeListener', common.mustCall((eventName, listener) => { <add> assert.strictEqual(eventName, 'hello'); <add> assert.strictEqual(listener, listener5); <add>})); <add>e7.emit('hello');
2
Ruby
Ruby
rescue any systemcallerror from atomic_write
fb1250a012f6f7896d601c70ef6f8db90a76263d
<ide><path>Library/Homebrew/keg_fix_install_names.rb <ide> def relocate_install_names old_prefix, new_prefix, old_cellar, new_cellar, optio <ide> <ide> begin <ide> first.atomic_write(s) <del> rescue Errno::EACCES <add> rescue SystemCallError <ide> first.ensure_writable do <ide> first.open("wb") { |f| f.write(s) } <ide> end
1
Javascript
Javascript
add redirect for now vanished field guide articles
989cb87c00738a245ac10a5165d8a58e06278cb5
<ide><path>server/boot/redirects.js <ide> module.exports = function(app) { <ide> router.get('/nonprofit-project-instructions', function(req, res) { <ide> res.redirect( <ide> 301, <del> "https://github.com/FreeCodeCamp/freecodecamp/wiki/How-Free-Code-Camp's-Nonprofit-Projects-work" <add> "//github.com/FreeCodeCamp/freecodecamp/wiki/How-Free-Code-Camp's-Nonprofit-Projects-work" <ide> ); <ide> }); <ide> <ide> module.exports = function(app) { <ide> <ide> router.get('/privacy', function(req, res) { <ide> res.redirect( <del> 301, "https://github.com/FreeCodeCamp/freecodecamp/wiki/Free-Code-Camp's-Privacy-Policy" <add> 301, "//github.com/FreeCodeCamp/freecodecamp/wiki/Free-Code-Camp's-Privacy-Policy" <ide> ); <ide> }); <ide> <ide> router.get('/learn-to-code', function(req, res) { <ide> res.redirect(301, '/map'); <ide> }); <ide> <add> router.get('/field-guide/*', function(req, res) { <add> res.redirect(302, '//github.com/freecodecamp/freecodecamp/wiki') <add> }); <add> <ide> router.get('/about', function(req, res) { <ide> res.redirect(301, '/map'); <ide> });
1
Javascript
Javascript
suggest git apply --reject for failed upgrades
4fbd244b9a6b62e0efe1b4b5a7ec3de468f020f6
<ide><path>react-native-git-upgrade/cliEntry.js <ide> async function run(requestedVersion, cliArgs) { <ide> } catch (err) { <ide> log.warn( <ide> 'The upgrade process succeeded but there might be conflicts to be resolved. ' + <del> 'See above for the list of files that have merge conflicts.'); <add> 'See above for the list of files that have merge conflicts. ' + <add> 'If you don’t see the expected changes, try running:\n' + <add> `git apply --reject ${patchPath}`); <ide> } finally { <ide> log.info('Upgrade done'); <ide> if (cliArgs.verbose) {
1
Go
Go
implement plugin restore after daemon restart
dfd91873056c172ffc061d882da0cd18204b521a
<ide><path>cmd/dockerd/daemon_plugin_support.go <ide> import ( <ide> ) <ide> <ide> func pluginInit(config *daemon.Config, remote libcontainerd.Remote, rs registry.Service) error { <del> return plugin.Init(config.Root, config.ExecRoot, remote, rs) <add> return plugin.Init(config.Root, config.ExecRoot, remote, rs, config.LiveRestore) <ide> } <ide><path>plugin/backend.go <ide> func (pm *Manager) Inspect(name string) (tp types.Plugin, err error) { <ide> if err != nil { <ide> return tp, err <ide> } <del> return p.p, nil <add> return p.P, nil <ide> } <ide> <ide> // Pull pulls a plugin and enables it. <ide> func (pm *Manager) Pull(name string, metaHeader http.Header, authConfig *types.A <ide> pm.save() <ide> pm.Unlock() <ide> <del> return computePrivileges(&p.p.Manifest), nil <add> return computePrivileges(&p.P.Manifest), nil <ide> } <ide> <ide> // List displays the list of plugins and associated metadata. <ide> func (pm *Manager) List() ([]types.Plugin, error) { <ide> out := make([]types.Plugin, 0, len(pm.plugins)) <ide> for _, p := range pm.plugins { <del> out = append(out, p.p) <add> out = append(out, p.P) <ide> } <ide> return out, nil <ide> } <ide> <ide> // Push pushes a plugin to the store. <ide> func (pm *Manager) Push(name string, metaHeader http.Header, authConfig *types.AuthConfig) error { <ide> p, err := pm.get(name) <del> dest := filepath.Join(pm.libRoot, p.p.ID) <add> dest := filepath.Join(pm.libRoot, p.P.ID) <ide> config, err := os.Open(filepath.Join(dest, "manifest.json")) <ide> if err != nil { <ide> return err <ide><path>plugin/manager.go <ide> func (e ErrInadequateCapability) Error() string { <ide> <ide> type plugin struct { <ide> //sync.RWMutex TODO <del> p types.Plugin <add> P types.Plugin `json:"plugin"` <ide> client *plugins.Client <ide> restartManager restartmanager.RestartManager <ide> stateSourcePath string <ide> func (p *plugin) Client() *plugins.Client { <ide> } <ide> <ide> func (p *plugin) Name() string { <del> name := p.p.Name <del> if len(p.p.Tag) > 0 { <add> name := p.P.Name <add> if len(p.P.Tag) > 0 { <ide> // TODO: this feels hacky, maybe we should be storing the distribution reference rather than splitting these <del> name += ":" + p.p.Tag <add> name += ":" + p.P.Tag <ide> } <ide> return name <ide> } <ide> <ide> func (pm *Manager) newPlugin(ref reference.Named, id string) *plugin { <ide> p := &plugin{ <del> p: types.Plugin{ <add> P: types.Plugin{ <ide> Name: ref.Name(), <ide> ID: id, <ide> }, <ide> stateSourcePath: filepath.Join(pm.libRoot, id, "state"), <ide> runtimeSourcePath: filepath.Join(pm.runRoot, id), <ide> } <ide> if ref, ok := ref.(reference.NamedTagged); ok { <del> p.p.Tag = ref.Tag() <add> p.P.Tag = ref.Tag() <ide> } <ide> return p <ide> } <ide> <del>// TODO: figure out why save() doesn't json encode *plugin object <add>func (pm *Manager) restorePlugin(p *plugin) error { <add> p.stateSourcePath = filepath.Join(pm.libRoot, p.P.ID, "state") <add> p.runtimeSourcePath = filepath.Join(pm.runRoot, p.P.ID) <add> if p.P.Active { <add> return pm.restore(p) <add> } <add> return nil <add>} <add> <ide> type pluginMap map[string]*plugin <ide> <ide> // Manager controls the plugin subsystem. <ide> type Manager struct { <ide> containerdClient libcontainerd.Client <ide> registryService registry.Service <ide> handleLegacy bool <add> liveRestore bool <ide> } <ide> <ide> // GetManager returns the singleton plugin Manager <ide> func GetManager() *Manager { <ide> <ide> // Init (was NewManager) instantiates the singleton Manager. 
<ide> // TODO: revert this to NewManager once we get rid of all the singletons. <del>func Init(root, execRoot string, remote libcontainerd.Remote, rs registry.Service) (err error) { <add>func Init(root, execRoot string, remote libcontainerd.Remote, rs registry.Service, liveRestore bool) (err error) { <ide> if manager != nil { <ide> return nil <ide> } <ide> func Init(root, execRoot string, remote libcontainerd.Remote, rs registry.Servic <ide> handlers: make(map[string]func(string, *plugins.Client)), <ide> registryService: rs, <ide> handleLegacy: true, <add> liveRestore: liveRestore, <ide> } <ide> if err := os.MkdirAll(manager.runRoot, 0700); err != nil { <ide> return err <ide> } <del> if err := manager.init(); err != nil { <del> return err <del> } <ide> manager.containerdClient, err = remote.Client(manager) <ide> if err != nil { <ide> return err <ide> } <add> if err := manager.init(); err != nil { <add> return err <add> } <ide> return nil <ide> } <ide> <ide> func FindWithCapability(capability string) ([]Plugin, error) { <ide> defer manager.RUnlock() <ide> pluginLoop: <ide> for _, p := range manager.plugins { <del> for _, typ := range p.p.Manifest.Interface.Types { <add> for _, typ := range p.P.Manifest.Interface.Types { <ide> if typ.Capability != capability || typ.Prefix != "docker" { <ide> continue pluginLoop <ide> } <ide> func LookupWithCapability(name, capability string) (Plugin, error) { <ide> } <ide> <ide> capability = strings.ToLower(capability) <del> for _, typ := range p.p.Manifest.Interface.Types { <add> for _, typ := range p.P.Manifest.Interface.Types { <ide> if typ.Capability == capability && typ.Prefix == "docker" { <ide> return p, nil <ide> } <ide> func (pm *Manager) init() error { <ide> } <ide> return err <ide> } <del> // TODO: Populate pm.plugins <del> if err := json.NewDecoder(dt).Decode(&pm.nameToID); err != nil { <add> <add> if err := json.NewDecoder(dt).Decode(&pm.plugins); err != nil { <ide> return err <ide> } <del> // FIXME: validate, restore <ide> <del> return nil <add> var group sync.WaitGroup <add> group.Add(len(pm.plugins)) <add> for _, p := range pm.plugins { <add> go func(p *plugin) { <add> defer group.Done() <add> if err := pm.restorePlugin(p); err != nil { <add> logrus.Errorf("Error restoring plugin '%s': %s", p.Name(), err) <add> return <add> } <add> <add> pm.Lock() <add> pm.nameToID[p.Name()] = p.P.ID <add> requiresManualRestore := !pm.liveRestore && p.P.Active <add> pm.Unlock() <add> <add> if requiresManualRestore { <add> // if liveRestore is not enabled, the plugin will be stopped now so we should enable it <add> if err := pm.enable(p); err != nil { <add> logrus.Errorf("Error restoring plugin '%s': %s", p.Name(), err) <add> } <add> } <add> }(p) <add> group.Wait() <add> } <add> return pm.save() <ide> } <ide> <ide> func (pm *Manager) initPlugin(p *plugin) error { <del> dt, err := os.Open(filepath.Join(pm.libRoot, p.p.ID, "manifest.json")) <add> dt, err := os.Open(filepath.Join(pm.libRoot, p.P.ID, "manifest.json")) <ide> if err != nil { <ide> return err <ide> } <del> err = json.NewDecoder(dt).Decode(&p.p.Manifest) <add> err = json.NewDecoder(dt).Decode(&p.P.Manifest) <ide> dt.Close() <ide> if err != nil { <ide> return err <ide> } <ide> <del> p.p.Config.Mounts = make([]types.PluginMount, len(p.p.Manifest.Mounts)) <del> for i, mount := range p.p.Manifest.Mounts { <del> p.p.Config.Mounts[i] = mount <add> p.P.Config.Mounts = make([]types.PluginMount, len(p.P.Manifest.Mounts)) <add> for i, mount := range p.P.Manifest.Mounts { <add> p.P.Config.Mounts[i] = mount <ide> 
} <del> p.p.Config.Env = make([]string, 0, len(p.p.Manifest.Env)) <del> for _, env := range p.p.Manifest.Env { <add> p.P.Config.Env = make([]string, 0, len(p.P.Manifest.Env)) <add> for _, env := range p.P.Manifest.Env { <ide> if env.Value != nil { <del> p.p.Config.Env = append(p.p.Config.Env, fmt.Sprintf("%s=%s", env.Name, *env.Value)) <add> p.P.Config.Env = append(p.P.Config.Env, fmt.Sprintf("%s=%s", env.Name, *env.Value)) <ide> } <ide> } <del> copy(p.p.Config.Args, p.p.Manifest.Args.Value) <add> copy(p.P.Config.Args, p.P.Manifest.Args.Value) <ide> <del> f, err := os.Create(filepath.Join(pm.libRoot, p.p.ID, "plugin-config.json")) <add> f, err := os.Create(filepath.Join(pm.libRoot, p.P.ID, "plugin-config.json")) <ide> if err != nil { <ide> return err <ide> } <del> err = json.NewEncoder(f).Encode(&p.p.Config) <add> err = json.NewEncoder(f).Encode(&p.P.Config) <ide> f.Close() <ide> return err <ide> } <ide> <ide> func (pm *Manager) remove(p *plugin) error { <del> if p.p.Active { <add> if p.P.Active { <ide> return fmt.Errorf("plugin %s is active", p.Name()) <ide> } <ide> pm.Lock() // fixme: lock single record <ide> defer pm.Unlock() <ide> os.RemoveAll(p.stateSourcePath) <del> delete(pm.plugins, p.p.ID) <add> delete(pm.plugins, p.P.ID) <ide> delete(pm.nameToID, p.Name()) <ide> pm.save() <ide> return nil <ide> func (pm *Manager) set(p *plugin, args []string) error { <ide> func (pm *Manager) save() error { <ide> filePath := filepath.Join(pm.libRoot, "plugins.json") <ide> <del> jsonData, err := json.Marshal(pm.nameToID) <add> jsonData, err := json.Marshal(pm.plugins) <ide> if err != nil { <ide> logrus.Debugf("Error in json.Marshal: %v", err) <ide> return err <ide><path>plugin/manager_linux.go <ide> func (pm *Manager) enable(p *plugin) error { <ide> } <ide> <ide> p.restartManager = restartmanager.New(container.RestartPolicy{Name: "always"}, 0) <del> if err := pm.containerdClient.Create(p.p.ID, libcontainerd.Spec(*spec), libcontainerd.WithRestartManager(p.restartManager)); err != nil { // POC-only <add> if err := pm.containerdClient.Create(p.P.ID, libcontainerd.Spec(*spec), libcontainerd.WithRestartManager(p.restartManager)); err != nil { // POC-only <ide> return err <ide> } <ide> <del> socket := p.p.Manifest.Interface.Socket <add> socket := p.P.Manifest.Interface.Socket <ide> p.client, err = plugins.NewClient("unix://"+filepath.Join(p.runtimeSourcePath, socket), nil) <ide> if err != nil { <ide> return err <ide> func (pm *Manager) enable(p *plugin) error { <ide> //TODO: check net.Dial <ide> <ide> pm.Lock() // fixme: lock single record <del> p.p.Active = true <add> p.P.Active = true <ide> pm.save() <ide> pm.Unlock() <ide> <del> for _, typ := range p.p.Manifest.Interface.Types { <add> for _, typ := range p.P.Manifest.Interface.Types { <ide> if handler := pm.handlers[typ.String()]; handler != nil { <ide> handler(p.Name(), p.Client()) <ide> } <ide> func (pm *Manager) enable(p *plugin) error { <ide> return nil <ide> } <ide> <add>func (pm *Manager) restore(p *plugin) error { <add> p.restartManager = restartmanager.New(container.RestartPolicy{Name: "always"}, 0) <add> return pm.containerdClient.Restore(p.P.ID, libcontainerd.WithRestartManager(p.restartManager)) <add>} <add> <ide> func (pm *Manager) initSpec(p *plugin) (*specs.Spec, error) { <ide> s := oci.DefaultSpec() <ide> <del> rootfs := filepath.Join(pm.libRoot, p.p.ID, "rootfs") <add> rootfs := filepath.Join(pm.libRoot, p.P.ID, "rootfs") <ide> s.Root = specs.Root{ <ide> Path: rootfs, <ide> Readonly: false, // TODO: all plugins should be readonly? 
settable in manifest? <ide> } <ide> <del> mounts := append(p.p.Config.Mounts, types.PluginMount{ <add> mounts := append(p.P.Config.Mounts, types.PluginMount{ <ide> Source: &p.runtimeSourcePath, <ide> Destination: defaultPluginRuntimeDestination, <ide> Type: "bind", <ide> func (pm *Manager) initSpec(p *plugin) (*specs.Spec, error) { <ide> s.Mounts = append(s.Mounts, m) <ide> } <ide> <del> envs := make([]string, 1, len(p.p.Config.Env)+1) <add> envs := make([]string, 1, len(p.P.Config.Env)+1) <ide> envs[0] = "PATH=" + system.DefaultPathEnv <del> envs = append(envs, p.p.Config.Env...) <add> envs = append(envs, p.P.Config.Env...) <ide> <del> args := append(p.p.Manifest.Entrypoint, p.p.Config.Args...) <add> args := append(p.P.Manifest.Entrypoint, p.P.Config.Args...) <ide> s.Process = specs.Process{ <ide> Terminal: false, <ide> Args: args, <ide> func (pm *Manager) disable(p *plugin) error { <ide> if err := p.restartManager.Cancel(); err != nil { <ide> logrus.Error(err) <ide> } <del> if err := pm.containerdClient.Signal(p.p.ID, int(syscall.SIGKILL)); err != nil { <add> if err := pm.containerdClient.Signal(p.P.ID, int(syscall.SIGKILL)); err != nil { <ide> logrus.Error(err) <ide> } <ide> os.RemoveAll(p.runtimeSourcePath) <ide> pm.Lock() // fixme: lock single record <ide> defer pm.Unlock() <del> p.p.Active = false <add> p.P.Active = false <ide> pm.save() <ide> return nil <ide> } <ide><path>plugin/manager_windows.go <ide> func (pm *Manager) initSpec(p *plugin) (*specs.Spec, error) { <ide> func (pm *Manager) disable(p *plugin) error { <ide> return fmt.Errorf("Not implemented") <ide> } <add> <add>func (pm *Manager) restore(p *plugin) error { <add> return fmt.Errorf("Not implemented") <add>}
5
Javascript
Javascript
remove invalid jsdoc tags for patch() method
a2c7cf9fb26637b89633cb7f33f97a9da76cfdb9
<ide><path>src/ng/http.js <ide> function $HttpProvider() { <ide> <ide> /** <ide> * @ngdoc method <del> * @name ng.$http#patch <del> * @methodOf ng.$http <add> * @name $http#patch <ide> * <ide> * @description <ide> * Shortcut method to perform `PATCH` request.
1
Javascript
Javascript
add explanation for processdependency method
a7b85301a71e4ddfcfaf53c5f2b82db69dbe1ac5
<ide><path>lib/Compilation.js <ide> BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si <ide> this.moduleGraph.setParents(dep, currentBlock, module); <ide> const resourceIdent = dep.getResourceIdentifier(); <ide> if (resourceIdent) { <add> // Here webpack is using heuristic that assumes <add> // mostly esm dependencies would be used <add> // so we don't allocate extra string for them <ide> const cacheKey = <ide> dep.category === esmDependencyCategory <ide> ? resourceIdent
1
Go
Go
fix tests depending on entrypoint split behavior
86ab343c3e98ded1ee1b12f04396ae011a0e6de6
<ide><path>integration-cli/docker_cli_run_test.go <ide> func (s *DockerSuite) TestRunWithVolumesFromExited(c *check.C) { <ide> <ide> // Create a file in a volume <ide> if daemonPlatform == "windows" { <del> out, exitCode = dockerCmd(c, "run", "--name", "test-data", "--volume", `c:\some\dir`, WindowsBaseImage, `cmd /c echo hello > c:\some\dir\file`) <add> out, exitCode = dockerCmd(c, "run", "--name", "test-data", "--volume", `c:\some\dir`, WindowsBaseImage, "cmd", "/c", `echo hello > c:\some\dir\file`) <ide> } else { <ide> out, exitCode = dockerCmd(c, "run", "--name", "test-data", "--volume", "/some/dir", "busybox", "touch", "/some/dir/file") <ide> } <ide> func (s *DockerSuite) TestRunWithVolumesFromExited(c *check.C) { <ide> <ide> // Read the file from another container using --volumes-from to access the volume in the second container <ide> if daemonPlatform == "windows" { <del> out, exitCode = dockerCmd(c, "run", "--volumes-from", "test-data", WindowsBaseImage, `cmd /c type c:\some\dir\file`) <add> out, exitCode = dockerCmd(c, "run", "--volumes-from", "test-data", WindowsBaseImage, "cmd", "/c", `type c:\some\dir\file`) <ide> } else { <ide> out, exitCode = dockerCmd(c, "run", "--volumes-from", "test-data", "busybox", "cat", "/some/dir/file") <ide> } <ide> func (s *DockerSuite) TestRunWithBadDevice(c *check.C) { <ide> func (s *DockerSuite) TestRunEntrypoint(c *check.C) { <ide> name := "entrypoint" <ide> <del> // Note Windows does not have an echo.exe built in. <del> var out, expected string <del> if daemonPlatform == "windows" { <del> out, _ = dockerCmd(c, "run", "--name", name, "--entrypoint", "cmd /s /c echo", "busybox", "foobar") <del> expected = "foobar\r\n" <del> } else { <del> out, _ = dockerCmd(c, "run", "--name", name, "--entrypoint", "/bin/echo", "busybox", "-n", "foobar") <del> expected = "foobar" <del> } <add> out, _ := dockerCmd(c, "run", "--name", name, "--entrypoint", "echo", "busybox", "-n", "foobar") <add> expected := "foobar" <ide> <ide> if out != expected { <ide> c.Fatalf("Output should be %q, actual out: %q", expected, out) <ide> func (s *DockerSuite) TestRunTTYWithPipe(c *check.C) { <ide> <ide> func (s *DockerSuite) TestRunNonLocalMacAddress(c *check.C) { <ide> addr := "00:16:3E:08:00:50" <del> cmd := "ifconfig" <del> image := "busybox" <add> args := []string{"run", "--mac-address", addr} <ide> expected := addr <ide> <del> if daemonPlatform == "windows" { <del> cmd = "ipconfig /all" <del> image = WindowsBaseImage <add> if daemonPlatform != "windows" { <add> args = append(args, "busybox", "ifconfig") <add> } else { <add> args = append(args, WindowsBaseImage, "ipconfig", "/all") <ide> expected = strings.Replace(strings.ToUpper(addr), ":", "-", -1) <del> <ide> } <ide> <del> if out, _ := dockerCmd(c, "run", "--mac-address", addr, image, cmd); !strings.Contains(out, expected) { <add> if out, _ := dockerCmd(c, args...); !strings.Contains(out, expected) { <ide> c.Fatalf("Output should have contained %q: %s", expected, out) <ide> } <ide> }
1
PHP
PHP
add use ...\str to helpers.php
1d4265dbfb4da567943c196b4e7162002ed54bde
<ide><path>src/Illuminate/Support/helpers.php <ide> <?php <ide> <add>use Illuminate\Support\Str; <add> <ide> if ( ! function_exists('action')) <ide> { <ide> /** <ide> function base_path($path = '') <ide> */ <ide> function camel_case($value) <ide> { <del> return Illuminate\Support\Str::camel($value); <add> return Str::camel($value); <ide> } <ide> } <ide> <ide> function e($value) <ide> */ <ide> function ends_with($haystack, $needle) <ide> { <del> return Illuminate\Support\Str::endsWith($haystack, $needle); <add> return Str::endsWith($haystack, $needle); <ide> } <ide> } <ide> <ide> function secure_url($path, $parameters = array()) <ide> */ <ide> function snake_case($value, $delimiter = '_') <ide> { <del> return Illuminate\Support\Str::snake($value, $delimiter); <add> return Str::snake($value, $delimiter); <ide> } <ide> } <ide> <ide> function snake_case($value, $delimiter = '_') <ide> */ <ide> function starts_with($haystack, $needle) <ide> { <del> return Illuminate\Support\Str::startsWith($haystack, $needle); <add> return Str::startsWith($haystack, $needle); <ide> } <ide> } <ide> <ide> function storage_path($path = '') <ide> */ <ide> function str_contains($haystack, $needle) <ide> { <del> return Illuminate\Support\Str::contains($haystack, $needle); <add> return Str::contains($haystack, $needle); <ide> } <ide> } <ide> <ide> function str_contains($haystack, $needle) <ide> */ <ide> function str_finish($value, $cap) <ide> { <del> return Illuminate\Support\Str::finish($value, $cap); <add> return Str::finish($value, $cap); <ide> } <ide> } <ide> <ide> function str_finish($value, $cap) <ide> */ <ide> function str_is($pattern, $value) <ide> { <del> return Illuminate\Support\Str::is($pattern, $value); <add> return Str::is($pattern, $value); <ide> } <ide> } <ide> <ide> function str_is($pattern, $value) <ide> */ <ide> function str_limit($value, $limit = 100, $end = '...') <ide> { <del> return Illuminate\Support\Str::limit($value, $limit, $end); <add> return Str::limit($value, $limit, $end); <ide> } <ide> } <ide> <ide> function str_limit($value, $limit = 100, $end = '...') <ide> */ <ide> function str_plural($value, $count = 2) <ide> { <del> return Illuminate\Support\Str::plural($value, $count); <add> return Str::plural($value, $count); <ide> } <ide> } <ide> <ide> function str_plural($value, $count = 2) <ide> */ <ide> function str_random($length = 16) <ide> { <del> return Illuminate\Support\Str::random($length); <add> return Str::random($length); <ide> } <ide> } <ide> <ide> function str_replace_array($search, array $replace, $subject) <ide> */ <ide> function str_singular($value) <ide> { <del> return Illuminate\Support\Str::singular($value); <add> return Str::singular($value); <ide> } <ide> } <ide> <ide> function str_singular($value) <ide> */ <ide> function studly_case($value) <ide> { <del> return Illuminate\Support\Str::studly($value); <add> return Str::studly($value); <ide> } <ide> } <ide>
1
PHP
PHP
use str_random instead of str_shuffle
2b59a8ff744a490a9f4f5f16057386ef7b79f6e6
<ide><path>src/Illuminate/Auth/Passwords/DatabaseTokenRepository.php <ide> public function deleteExpired() <ide> */ <ide> public function createNewToken(CanResetPasswordContract $user) <ide> { <del> $email = $user->getEmailForPasswordReset(); <del> <del> $value = str_shuffle(sha1($email.spl_object_hash($this).microtime(true))); <del> <del> return hash_hmac('sha1', $value, $this->hashKey); <add> return hash_hmac('sha256', str_random(40), $this->hashKey); <ide> } <ide> <ide> /**
1
Python
Python
enhance percpu/cpu switch
74ebc9bff8fc2ed365c64ad30130131e050bc493
<ide><path>glances/outputs/glances_curses.py <ide> class _GlancesCurses(object): <ide> 'cpu_times', 'io_counters', 'name'] <ide> <ide> # Define top menu <del> _top = ['quicklook', 'cpu', 'gpu', 'mem', 'memswap', 'load'] <add> _top = ['quicklook', 'cpu', 'percpu', 'gpu', 'mem', 'memswap', 'load'] <ide> <ide> # Define left sidebar <ide> _left_sidebar = ['network', 'wifi', 'ports', 'diskio', 'fs', <ide> def __get_stat_display(self, stats, layer): <ide> ret[p] = stats.get_plugin(p).get_stats_display(args=self.args, <ide> max_width=plugin_max_width) <ide> <del> if self.args.percpu: <del> ret['cpu'] = ret['percpu'] <del> <ide> return ret <ide> <ide> def display(self, stats, cs_status=None): <ide><path>glances/plugins/glances_cpu.py <ide> def msg_curse(self, args=None, max_width=None): <ide> ret = [] <ide> <ide> # Only process if stats exist and plugin not disable <del> if not self.stats or self.is_disable(): <add> if not self.stats or self.args.percpu or self.is_disable(): <ide> return ret <ide> <ide> # Build the string message <ide><path>glances/plugins/glances_percpu.py <ide> def msg_curse(self, args=None, max_width=None): <ide> ret = [] <ide> <ide> # Only process if stats exist... <del> if not self.stats or self.is_disable(): <del> return ret <del> <del> # No per CPU stat ? Exit... <del> if not self.stats: <del> msg = 'PER CPU not available' <del> ret.append(self.curse_add_line(msg, "TITLE")) <add> if not self.stats or not self.args.percpu or self.is_disable(): <ide> return ret <ide> <ide> # Build the string message
3
Python
Python
fix error about pep8
21bce2b8e649d37da4e58dc5c8be51a50bdd60e7
<ide><path>celery/backends/elasticsearch.py <ide> def __init__(self, url=None, *args, **kwargs): <ide> self.port = port or self.port <ide> <ide> self.es_retry_on_timeout = ( <del> _get('elasticsearch_retry_on_timeout') or self.es_retry_on_timeout <del> ) <add> _get('elasticsearch_retry_on_timeout') or self.es_retry_on_timeout <add> ) <ide> <ide> es_timeout = _get('elasticsearch_timeout') <ide> if es_timeout is not None: <ide> self.es_timeout = es_timeout <ide> <ide> es_max_retries = _get('elasticsearch_max_retries') <del> if es_max_retries is not None: <add> if es_max_retries is not None: <ide> self.es_max_retries = es_max_retries <ide> <ide> self._server = None <ide> def delete(self, key): <ide> <ide> def _get_server(self): <ide> """Connect to the Elasticsearch server.""" <del> return elasticsearch.Elasticsearch('%s:%s' % (self.host, self.port), <del> retry_on_timeout=self.es_retry_on_timeout, <del> max_retries=self.es_max_retries, <del> timeout=self.es_timeout) <add> return elasticsearch.Elasticsearch( <add> '%s:%s' % (self.host, self.port), <add> retry_on_timeout=self.es_retry_on_timeout, <add> max_retries=self.es_max_retries, <add> timeout=self.es_timeout <add> ) <ide> <ide> @property <ide> def server(self):
1
Javascript
Javascript
convert easy files to flow strict-local
e36247030575e93c1d7087cdb04e1498e9577056
<ide><path>IntegrationTests/LayoutEventsTest.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>IntegrationTests/ReactContentSizeUpdateTest.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>IntegrationTests/SimpleSnapshotTest.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>IntegrationTests/SizeFlexibilityUpdateTest.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/BugReporting/BugReporting.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/AccessibilityInfo/NativeAccessibilityInfo.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/ActivityIndicator/ActivityIndicatorViewNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/ActivityIndicator/__tests__/ActivityIndicator-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/AppleTV/NativeTVNavigationEventEmitter.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/CheckBox/AndroidCheckBoxNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/Clipboard/NativeClipboard.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/DatePicker/RCTDatePickerNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/DrawerAndroid/AndroidDrawerLayoutNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/DrawerAndroid/__tests__/DrawerAndroid-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/Keyboard/NativeKeyboardObserver.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/Keyboard/__tests__/Keyboard-test.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> * @emails oncall+react_native <ide> */ <ide> <ide><path>Libraries/Components/MaskedView/MaskedViewIOS.android.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/MaskedView/RCTMaskedViewNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> import type {ViewProps} from '../View/ViewPropTypes'; <ide><path>Libraries/Components/MaskedView/__tests__/MaskedViewIOS-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/Picker/AndroidDialogPickerNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/Picker/AndroidDropdownPickerNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/Picker/PickerIOS.ios.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> // This is a controlled component version of RCTPickerIOS. <ide><path>Libraries/Components/Picker/RCTPickerNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/Picker/__tests__/Picker-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/ProgressBarAndroid/ProgressBarAndroid.android.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/ProgressBarAndroid/ProgressBarAndroidNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/ProgressBarAndroid/__tests__/ProgressBarAndroid-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/ProgressViewIOS/ProgressViewIOS.ios.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/ProgressViewIOS/RCTProgressViewNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/RefreshControl/AndroidSwipeRefreshLayoutNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/RefreshControl/PullToRefreshViewNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/SafeAreaView/__tests__/SafeAreaView-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/ScrollView/ScrollViewViewConfig.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/ScrollView/processDecelerationRate.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/SegmentedControlIOS/RCTSegmentedControlNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/SegmentedControlIOS/SegmentedControlIOS.ios.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/Slider/SliderNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/Sound/NativeSoundManager.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/Sound/SoundManager.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/StaticRenderer.js <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/StatusBar/NativeStatusBarManagerAndroid.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/StatusBar/NativeStatusBarManagerIOS.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/Switch/AndroidSwitchNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/Switch/Switch.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> * @generate-docs <ide> */ <ide><path>Libraries/Components/Switch/SwitchNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/TextInput/AndroidTextInputNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/TextInput/RCTInputAccessoryViewNativeComponent.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/TextInput/__tests__/InputAccessoryView-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/ToastAndroid/NativeToastAndroid.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Components/ToastAndroid/ToastAndroid.android.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/UnimplementedViews/UnimplementedNativeViewNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Components/View/ReactNativeViewViewConfigAndroid.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Core/Devtools/__tests__/parseHermesStack-test.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> * @emails oncall+react_native <ide> */ <ide><path>Libraries/Core/Devtools/parseHermesStack.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Core/InitializeCore.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> /* globals window: true */ <ide><path>Libraries/Core/ReactFiberErrorDialog.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> export type CapturedError = { <ide><path>Libraries/Core/Timers/NativeTiming.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Core/polyfillPromise.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/DeprecatedPropTypes/DeprecatedTVViewPropTypes.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/DeprecatedPropTypes/DeprecatedViewPropTypes.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/HeapCapture/NativeHeapCapture.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Image/ImagePickerIOS.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Image/ImageProps.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Image/NativeImagePickerIOS.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Image/__tests__/Image-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Image/nativeImageSource.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Interaction/NativeFrameRateLogger.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> import type {TurboModule} from '../TurboModule/RCTExport'; <ide><path>Libraries/Linking/NativeLinking.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/LogBox/Data/LogBoxLog.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/LogBox/Data/parseLogBoxLog.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/LogBox/UI/LogBoxInspectorCodeFrame.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/LogBox/UI/LogBoxInspectorFooter.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/LogBox/UI/__tests__/LogBoxButton-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxContainer-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspector-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorCodeFrame-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorFooter-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorHeader-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorMesageHeader-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorReactFrames-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorSection-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; 
<ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorSourceMapStatus-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorStackFrame-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxInspectorStackFrames-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxLogNotification-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/LogBox/UI/__tests__/LogBoxMessage-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Modal/NativeModalManager.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Modal/RCTModalHostViewNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Modal/__tests__/Modal-test.js <ide> * <ide> * @format <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/NativeModules/specs/NativeAnimationsDebugModule.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NativeModules/specs/NativeSourceCode.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NewAppScreen/components/Colors.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NewAppScreen/components/DebugInstructions.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NewAppScreen/components/Header.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NewAppScreen/components/LearnMoreLinks.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NewAppScreen/components/ReloadInstructions.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/NewAppScreen/index.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Performance/NativeJSCSamplingProfiler.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/PermissionsAndroid/PermissionsAndroid.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/ReactNative/HeadlessJsTaskError.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/ReactNative/NativeHeadlessJsTaskSupport.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/ReactNative/NativeI18nManager.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Renderer/shims/ReactFeatureFlags.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Storage/NativeAsyncStorage.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Types/CodegenTypes.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Utilities/GlobalPerformanceLogger.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Utilities/HMRClientProdShim.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Utilities/LoadingView.android.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Utilities/PerformanceLoggerContext.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/Utilities/useColorScheme.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Utilities/warnOnce.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>Libraries/Vibration/NativeVibration.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>Libraries/YellowBox/Data/YellowBoxWarning.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>RNTester/NativeModuleExample/NativeScreenshotManager.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>RNTester/RCTTest/RCTSnapshotNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/components/RNTesterButton.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/InputAccessoryView/InputAccessoryViewExample.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>RNTester/js/examples/PermissionsAndroid/PermissionsExample.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/ProgressBarAndroid/ProgressBarAndroidExample.android.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/ProgressViewIOS/ProgressViewIOSExample.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/ScrollView/ScrollViewAnimatedExample.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/Text/TextExample.android.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/ToastAndroid/ToastAndroidExample.android.js <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>RNTester/js/examples/TurboModule/SampleTurboModuleExample.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>ReactAndroid/src/androidTest/js/UIManagerTestModule.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>ReactCommon/hermes/inspector/tools/msggen/src/Converters.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>ReactCommon/hermes/inspector/tools/msggen/src/Graph.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>flow-typed/npm/metro_vx.x.x.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/babel-plugin-inline-view-configs/__test_fixtures__/failures.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/babel-plugin-inline-view-configs/__test_fixtures__/fixtures.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/ArrayPropsNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/BooleanPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/ColorPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/EdgeInsetsPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/EnumPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/EventNestedObjectPropsNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/EventPropsNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/FloatPropsNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/ImagePropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/IntegerPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/InterfaceOnlyNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/MultiNativePropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/NoPropsNoEventsNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/ObjectPropsNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/PointPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/components/StringPropNativeComponent.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/modules/NativeBooleanTurboModule.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/modules/NativeCallbackTurboModule.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/modules/NativeNumberTurboModule.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/modules/NativePromiseTurboModule.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/e2e/__test_fixtures__/modules/NativeStringTurboModule.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/cli/combine/combine-js-to-schema-cli.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/cli/combine/combine-js-to-schema.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/cli/parser/parser-cli.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/cli/parser/parser.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/cli/verify_with_old_codegen.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/parsers/flow/components/__test_fixtures__/failures.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const COMMANDS_DEFINED_INLINE = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_DEFINED_MULTIPLE_TIMES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_DEFINED_WITHOUT_REF = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_DEFINED_WITH_NULLABLE_REF = ` <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_DEFINED_WITH_MISMATCHED_METHOD_NAMES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_DEFINED_WITHOUT_METHOD_NAMES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const NULLABLE_WITH_DEFAULT = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const NON_OPTIONAL_KEY_WITH_DEFAULT_VALUE = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROPS_CONFLICT_NAMES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROPS_CONFLICT_WITH_SPREAD_PROPS = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROPS_SPREAD_CONFLICTS_WITH_PROPS = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROP_NUMBER_TYPE = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROP_MIXED_ENUM = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROP_ENUM_BOOLEAN = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROP_ARRAY_MIXED_ENUM = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROP_ARRAY_ENUM_BOOLEAN = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROP_ARRAY_ENUM_INT = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/src/parsers/flow/components/__test_fixtures__/fixtures.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const ONE_OF_EACH_PROP_EVENT_DEFAULT_AND_OPTIONS = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const ONE_OF_EACH_PROP_EVENT_DEFAULT_AND_OPTIONS_NO_CAST = ` <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const NO_PROPS_EVENTS_ONLY_DEPRECATED_VIEW_CONFIG_NAME_OPTION = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const ALL_PROP_TYPES_NO_EVENTS = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const ARRAY_PROP_TYPES_NO_EVENTS = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const OBJECT_PROP_TYPES_NO_EVENTS = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROPS_ALIASED_LOCALLY = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const EVENTS_DEFINED_INLINE_WITH_ALL_TYPES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const EVENTS_DEFINED_AS_NULL_INLINE = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROPS_AND_EVENTS_TYPES_EXPORTED = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const PROPS_AS_EXTERNAL_TYPES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_DEFINED_WITH_ALL_TYPES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_WITH_EXTERNAL_TYPES = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide> const COMMANDS_AND_EVENTS_TYPES_EXPORTED = ` <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/src/parsers/flow/components/__tests__/component-parser-test.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/parsers/flow/components/schema.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>packages/react-native-codegen/src/parsers/flow/modules/__test_fixtures__/failures.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULES_WITH_ARRAY_WITH_NO_TYPE_FOR_CONTENT = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULES_WITH_ARRAY_WITH_NO_TYPE_FOR_CONTENT_AS_PARAM = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULES_WITH_READ_ONLY_OBJECT_NO_TYPE_FOR_CONTENT = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULES_WITH_NOT_ONLY_METHODS = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULES_WITH_UNNAMED_PARAMS = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULES_WITH_PROMISE_WITHOUT_TYPE = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const TWO_NATIVE_MODULES_EXPORTED_WITH_DEFAULT = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const TWO_NATIVE_EXTENDING_TURBO_MODULE = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/parsers/flow/modules/__test_fixtures__/fixtures.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const EMPTY_NATIVE_MODULE = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_COMPLEX_OBJECTS = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_COMPLEX_OBJECTS_WITH_NULLABLE_KEY = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_BASIC_PARAM_TYPES = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_WITH_ALIASES = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_WITH_FLOAT_AND_INT32 = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_SIMPLE_OBJECT = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_NULLABLE_PARAM = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_BASIC_ARRAY = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_OBJECT_WITH_OBJECT_DEIFNED_IN_FILE_AS_PROPERTY = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_ARRAY_WITH_UNION_AND_TOUPLE = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_ARRAY_WITH_ALIAS = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_COMPLEX_ARRAY = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_PROMISE = `/** <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide> const NATIVE_MODULE_WITH_CALLBACK = ` <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/parsers/flow/modules/__tests__/module-parser-test.js <ide> * LICENSE file in the root directory of this source tree. 
<ide> * <ide> * @emails oncall+react_native <del> * @flow <add> * @flow strict-local <ide> * @format <ide> */ <ide> <ide><path>packages/react-native-codegen/src/parsers/flow/modules/schema.js <ide> * LICENSE file in the root directory of this source tree. <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> 'use strict'; <ide><path>template/App.js <ide> * https://github.com/facebook/react-native <ide> * <ide> * @format <del> * @flow <add> * @flow strict-local <ide> */ <ide> <ide> import React from 'react';
165
Text
Text
add link to primitives article
fe99d38f51cdc05ac6c564d0881b062453db60e1
<ide><path>threejs/lessons/threejs-responsive.md <ide> display and you compare this sample to those above you should <ide> notice the edges are more crisp. <ide> <ide> This article covered a very basic but fundamental topic. Next up lets quickly <del>go over the basic primitives that three.js provides. <add>[go over the basic primitives that three.js provides](threejs-primitives.html). <ide>
1
Text
Text
allow any response status for error response
2ac98e39a262f318974ad50e9e1068291b760dea
<ide><path>curriculum/challenges/english/05-apis-and-microservices/apis-and-microservices-projects/timestamp-microservice.md <ide> If the input date string is invalid, the api returns an object having the struct <ide> assert.equal(data.error.toLowerCase(), 'invalid date'); <ide> }, <ide> (xhr) => { <del> throw new Error(xhr.responseText); <add> assert(xhr.responseJSON.error.toLowerCase() === 'invalid date'); <ide> } <ide> ); <ide> ```
1
Python
Python
fix documentation rendering
043799295f82b01235479d5b323de4598dca48a0
<ide><path>numpy/core/multiarray.py <ide> def packbits(a, axis=None, bitorder='big'): <ide> ``None`` implies packing the flattened array. <ide> bitorder : {'big', 'little'}, optional <ide> The order of the input bits. 'big' will mimic bin(val), <del> ``[0, 0, 0, 0, 0, 0, 1, 1] => 3 = 0b00000011 => ``, 'little' will <add> ``[0, 0, 0, 0, 0, 0, 1, 1] => 3 = 0b00000011``, 'little' will <ide> reverse the order so ``[1, 1, 0, 0, 0, 0, 0, 0] => 3``. <ide> Defaults to 'big'. <ide>
1
Javascript
Javascript
satisfy linter requirements
24365b95df18b79fd8f995007a5d2735fecae424
<ide><path>src/timeout-cop.js <del>var parentProcessId = process.argv[2]; <del>var timeoutInMinutes = process.argv[3]; <del>var timeoutInMilliseconds = timeoutInMinutes * 1000 * 60 <add>'use strict' <ide> <del>function exitTestRunner() { <del> process.kill(parentProcessId, "SIGINT"); <del> var errorMessage = "The test suite has timed out because it has been running"; <del> errorMessage += " for more than " + timeoutInMinutes + " minutes."; <del> console.log(errorMessage); <add>let parentProcessId = process.argv[2] <add>let timeoutInMinutes = process.argv[3] <add>let timeoutInMilliseconds = timeoutInMinutes * 1000 * 60 <add> <add>function exitTestRunner () { <add> process.kill(parentProcessId, 'SIGINT') <add> let errorMessage = 'The test suite has timed out because it has been running' <add> errorMessage += ' for more than ' + timeoutInMinutes + ' minutes.' <add> console.log(errorMessage) <ide> } <ide> <del>setTimeout(exitTestRunner, timeoutInMilliseconds); <add>setTimeout(exitTestRunner, timeoutInMilliseconds)
1
Ruby
Ruby
make sure timestamp is properly referenced
a4207c1084439051ca0d828768382bcff86c5d92
<ide><path>activerecord/lib/active_record/attribute_methods/dirty.rb <ide> module Dirty <ide> include AttributeMethods::Write <ide> <ide> included do <del> if self < Timestamp <add> if self < ::ActiveRecord::Timestamp <ide> raise "You cannot include Dirty after Timestamp" <ide> end <ide>
1
Ruby
Ruby
add major/minor/patch examples
919e94bb9273a48ade57a2be184e9c3ee5e8dc07
<ide><path>Library/Homebrew/test/version_spec.rb <ide> expect(v2.to_str).to eq("HEAD-ffffff") <ide> end <ide> <add> describe "#major" do <add> it "returns major version token" do <add> expect(described_class.create("1").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3alpha").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3alpha4").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3beta4").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3pre4").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3rc4").major).to be == Version::Token.create("1") <add> expect(described_class.create("1.2.3-p4").major).to be == Version::Token.create("1") <add> end <add> end <add> <add> describe "#minor" do <add> it "returns minor version token" do <add> expect(described_class.create("1").minor).to be nil <add> expect(described_class.create("1.2").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3alpha").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3alpha4").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3beta4").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3pre4").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3rc4").minor).to be == Version::Token.create("2") <add> expect(described_class.create("1.2.3-p4").minor).to be == Version::Token.create("2") <add> end <add> end <add> <add> describe "#patch" do <add> it "returns patch version token" do <add> expect(described_class.create("1").patch).to be nil <add> expect(described_class.create("1.2").patch).to be nil <add> expect(described_class.create("1.2.3").patch).to be == Version::Token.create("3") <add> expect(described_class.create("1.2.3alpha").patch).to be == Version::Token.create("3") <add> expect(described_class.create("1.2.3alpha4").patch).to be == Version::Token.create("3") <add> expect(described_class.create("1.2.3beta4").patch).to be == Version::Token.create("3") <add> expect(described_class.create("1.2.3pre4").patch).to be == Version::Token.create("3") <add> expect(described_class.create("1.2.3rc4").patch).to be == Version::Token.create("3") <add> expect(described_class.create("1.2.3-p4").patch).to be == Version::Token.create("3") <add> end <add> end <add> <add> describe "#major_minor" do <add> it "returns major.minor version" do <add> expect(described_class.create("1").major_minor).to be == described_class.create("1") <add> expect(described_class.create("1.2").major_minor).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3").major_minor).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3alpha").major_minor).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3alpha4").major_minor).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3beta4").major_minor).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3pre4").major_minor).to be == described_class.create("1.2") <add> 
expect(described_class.create("1.2.3rc4").major_minor).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3-p4").major_minor).to be == described_class.create("1.2") <add> end <add> end <add> <add> describe "#major_minor_patch" do <add> it "returns major.minor.patch version" do <add> expect(described_class.create("1").major_minor_patch).to be == described_class.create("1") <add> expect(described_class.create("1.2").major_minor_patch).to be == described_class.create("1.2") <add> expect(described_class.create("1.2.3").major_minor_patch).to be == described_class.create("1.2.3") <add> expect(described_class.create("1.2.3alpha").major_minor_patch).to be == described_class.create("1.2.3") <add> expect(described_class.create("1.2.3alpha4").major_minor_patch).to be == described_class.create("1.2.3") <add> expect(described_class.create("1.2.3beta4").major_minor_patch).to be == described_class.create("1.2.3") <add> expect(described_class.create("1.2.3pre4").major_minor_patch).to be == described_class.create("1.2.3") <add> expect(described_class.create("1.2.3rc4").major_minor_patch).to be == described_class.create("1.2.3") <add> expect(described_class.create("1.2.3-p4").major_minor_patch).to be == described_class.create("1.2.3") <add> end <add> end <add> <ide> describe "::parse" do <ide> it "returns a NULL version when the URL cannot be parsed" do <ide> expect(described_class.parse("https://brew.sh/blah.tar")).to be_null
1
PHP
PHP
make readbase64 an instance method
4a4ca7c8d40fb94b50740a6998f9238b7f7b4f5b
<ide><path>lib/Cake/Network/Email/CakeEmail.php <ide> protected function _attachFiles($boundary = null) { <ide> * @return string File contents in base64 encoding <ide> */ <ide> protected function _readFile($file) { <del> return File::readAndBase64Encode($file); <add> $f = new File($file); <add> return $f->readBase64(); <ide> } <ide> <ide> /** <ide><path>lib/Cake/Utility/File.php <ide> public function read($bytes = false, $mode = 'rb', $force = false) { <ide> } <ide> <ide> /** <del> * Read the file contents and return a base64 version of the file contents. <add> * Return the contents of this File as a base64 version of the file contents. <ide> * <del> * @param string $file The file to read. <ide> * @return string File contents in base64 encoding <ide> * @link http://book.cakephp.org/2.0/en/core-utility-libraries/file-folder.html#File::readAndBase64Encode <ide> */ <del> public static function readAndBase64Encode($file) { <del> $handle = fopen($file, 'rb'); <del> $data = fread($handle, filesize($file)); <del> $data = chunk_split(base64_encode($data)); <del> fclose($handle); <del> return $data; <add> public function readBase64() { <add> return chunk_split(base64_encode($this->read())); <ide> } <ide> <ide> /**
2
PHP
PHP
remove resource class
074cc3cc5df1062fa6ae7e274ad0c73721e6d897
<ide><path>src/Illuminate/Http/Resources/Json/Resource.php <del><?php <del> <del>namespace Illuminate\Http\Resources\Json; <del> <del>class Resource extends JsonResource <del>{ <del> // <del>}
1
Text
Text
add note about new binaries to changelog
32b8a6e735ef674e1ea2415915e41531bcd4a9c8
<ide><path>CHANGELOG.md <ide> To resolve this: <ide> After making those changes, run `sudo systemctl daemon-reload`, and `sudo <ide> systemctl restart docker` to reload changes and (re)start the docker daemon. <ide> <add>**IMPORTANT**: With Docker 1.12, a Linux docker installation now has two <add>additional binaries; `dockerd`, and `docker-proxy`. If you have scripts for <add>installing docker, please make sure to update them accordingly. <ide> <ide> ### Builder <ide> <ide> systemctl restart docker` to reload changes and (re)start the docker daemon. <ide> <ide> ### Runtime <ide> <add>+ Split the userland proxy to a separate binary (`docker-proxy`) [#23312](https://github.com/docker/docker/pull/23312) <ide> + Add `--live-restore` daemon flag to keep containers running when daemon shuts down, and regain control on startup [#23213](https://github.com/docker/docker/pull/23213) <ide> + Ability to add OCI-compatible runtimes (via `--add-runtime` daemon flag) and select one with `--runtime` on `create` and `run` [#22983](https://github.com/docker/docker/pull/22983) <ide> + New `overlay2` graphdriver for Linux 4.0+ with multiple lower directory support [#22126](https://github.com/docker/docker/pull/22126)
1
Ruby
Ruby
add missing requires
c53d3929cd5d7a2ac39411c8137d469e5047a4f4
<ide><path>activerecord/test/cases/associations/inner_join_association_test.rb <ide> require 'models/post' <ide> require 'models/comment' <ide> require 'models/author' <add>require 'models/essay' <ide> require 'models/category' <ide> require 'models/categorization' <ide> require 'models/person' <ide><path>activesupport/test/ordered_hash_test.rb <ide> require 'abstract_unit' <ide> require 'active_support/json' <ide> require 'active_support/core_ext/object/to_json' <add>require 'active_support/core_ext/hash/indifferent_access' <ide> <ide> class OrderedHashTest < Test::Unit::TestCase <ide> def setup
2
Javascript
Javascript
remove duplicate this.errno assignment
2b569deed32d7f601e9c5af0415e842e1440d46e
<ide><path>lib/cluster.js <ide> Worker.prototype.send = function() { <ide> <ide> function SharedHandle(key, address, port, addressType, backlog, fd) { <ide> this.key = key; <del> this.errno = ''; <ide> this.workers = []; <ide> this.handle = null; <ide> this.errno = 0;
1
Go
Go
move cgmounts to be a field in sysinfo
6677ab6a63014237b8149b2aeba48fccf43ac7c1
<ide><path>pkg/sysinfo/cgroup2_linux.go <ide> func newV2(quiet bool, options ...Opt) *SysInfo { <ide> applyCgroupNsInfo, <ide> } <ide> for _, o := range ops { <del> w := o(sysInfo, nil) <add> w := o(sysInfo) <ide> warnings = append(warnings, w...) <ide> } <ide> if !quiet { <ide><path>pkg/sysinfo/sysinfo.go <ide> type SysInfo struct { <ide> <ide> // Whether the cgroup is in unified mode (v2). <ide> CgroupUnified bool <add> <add> // cgMounts is the list of cgroup v1 mount paths, indexed by subsystem, to <add> // inspect availability of subsystems. <add> cgMounts map[string]string <ide> } <ide> <ide> type cgroupMemInfo struct { <ide><path>pkg/sysinfo/sysinfo_linux.go <ide> func findCgroupMountpoints() (map[string]string, error) { <ide> return mps, nil <ide> } <ide> <del>type infoCollector func(info *SysInfo, cgMounts map[string]string) (warnings []string) <add>type infoCollector func(info *SysInfo) (warnings []string) <ide> <ide> type opts struct { <ide> cg2GroupPath string <ide> func New(quiet bool, options ...Opt) *SysInfo { <ide> } <ide> <ide> func newV1(quiet bool) *SysInfo { <del> var ops []infoCollector <del> var warnings []string <del> sysInfo := &SysInfo{} <del> cgMounts, err := findCgroupMountpoints() <add> var ( <add> err error <add> ops []infoCollector <add> warnings []string <add> sysInfo = &SysInfo{} <add> ) <add> sysInfo.cgMounts, err = findCgroupMountpoints() <ide> if err != nil { <ide> logrus.Warn(err) <ide> } else { <ide> func newV1(quiet bool) *SysInfo { <ide> }...) <ide> <ide> for _, o := range ops { <del> w := o(sysInfo, cgMounts) <add> w := o(sysInfo) <ide> warnings = append(warnings, w...) <ide> } <ide> if !quiet { <ide> func newV1(quiet bool) *SysInfo { <ide> } <ide> <ide> // applyMemoryCgroupInfo adds the memory cgroup controller information to the info. <del>func applyMemoryCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <add>func applyMemoryCgroupInfo(info *SysInfo) []string { <ide> var warnings []string <del> mountPoint, ok := cgMounts["memory"] <add> mountPoint, ok := info.cgMounts["memory"] <ide> if !ok { <ide> warnings = append(warnings, "Your kernel does not support cgroup memory limit") <ide> return warnings <ide> func applyMemoryCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <ide> } <ide> <ide> // applyCPUCgroupInfo adds the cpu cgroup controller information to the info. <del>func applyCPUCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <add>func applyCPUCgroupInfo(info *SysInfo) []string { <ide> var warnings []string <del> mountPoint, ok := cgMounts["cpu"] <add> mountPoint, ok := info.cgMounts["cpu"] <ide> if !ok { <ide> warnings = append(warnings, "Unable to find cpu cgroup in mounts") <ide> return warnings <ide> func applyCPUCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <ide> } <ide> <ide> // applyBlkioCgroupInfo adds the blkio cgroup controller information to the info. <del>func applyBlkioCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <add>func applyBlkioCgroupInfo(info *SysInfo) []string { <ide> var warnings []string <del> mountPoint, ok := cgMounts["blkio"] <add> mountPoint, ok := info.cgMounts["blkio"] <ide> if !ok { <ide> warnings = append(warnings, "Unable to find blkio cgroup in mounts") <ide> return warnings <ide> func applyBlkioCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <ide> } <ide> <ide> // applyCPUSetCgroupInfo adds the cpuset cgroup controller information to the info. 
<del>func applyCPUSetCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <add>func applyCPUSetCgroupInfo(info *SysInfo) []string { <ide> var warnings []string <del> mountPoint, ok := cgMounts["cpuset"] <add> mountPoint, ok := info.cgMounts["cpuset"] <ide> if !ok { <ide> warnings = append(warnings, "Unable to find cpuset cgroup in mounts") <ide> return warnings <ide> func applyCPUSetCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <ide> } <ide> <ide> // applyPIDSCgroupInfo adds whether the pids cgroup controller is available to the info. <del>func applyPIDSCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <add>func applyPIDSCgroupInfo(info *SysInfo) []string { <ide> var warnings []string <del> _, ok := cgMounts["pids"] <add> _, ok := info.cgMounts["pids"] <ide> if !ok { <ide> warnings = append(warnings, "Unable to find pids cgroup in mounts") <ide> return warnings <ide> func applyPIDSCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <ide> } <ide> <ide> // applyDevicesCgroupInfo adds whether the devices cgroup controller is available to the info. <del>func applyDevicesCgroupInfo(info *SysInfo, cgMounts map[string]string) []string { <add>func applyDevicesCgroupInfo(info *SysInfo) []string { <ide> var warnings []string <del> _, ok := cgMounts["devices"] <add> _, ok := info.cgMounts["devices"] <ide> info.CgroupDevicesEnabled = ok <ide> return warnings <ide> } <ide> <ide> // applyNetworkingInfo adds networking information to the info. <del>func applyNetworkingInfo(info *SysInfo, _ map[string]string) []string { <add>func applyNetworkingInfo(info *SysInfo) []string { <ide> var warnings []string <ide> info.IPv4ForwardingDisabled = !readProcBool("/proc/sys/net/ipv4/ip_forward") <ide> info.BridgeNFCallIPTablesDisabled = !readProcBool("/proc/sys/net/bridge/bridge-nf-call-iptables") <ide> func applyNetworkingInfo(info *SysInfo, _ map[string]string) []string { <ide> } <ide> <ide> // applyAppArmorInfo adds whether AppArmor is enabled to the info. <del>func applyAppArmorInfo(info *SysInfo, _ map[string]string) []string { <add>func applyAppArmorInfo(info *SysInfo) []string { <ide> var warnings []string <ide> if _, err := os.Stat("/sys/kernel/security/apparmor"); !os.IsNotExist(err) { <ide> if _, err := ioutil.ReadFile("/sys/kernel/security/apparmor/profiles"); err == nil { <ide> func applyAppArmorInfo(info *SysInfo, _ map[string]string) []string { <ide> } <ide> <ide> // applyCgroupNsInfo adds whether cgroupns is enabled to the info. <del>func applyCgroupNsInfo(info *SysInfo, _ map[string]string) []string { <add>func applyCgroupNsInfo(info *SysInfo) []string { <ide> var warnings []string <ide> if _, err := os.Stat("/proc/self/ns/cgroup"); !os.IsNotExist(err) { <ide> info.CgroupNamespaces = true <ide> var ( <ide> ) <ide> <ide> // applySeccompInfo checks if Seccomp is supported, via CONFIG_SECCOMP. <del>func applySeccompInfo(info *SysInfo, _ map[string]string) []string { <add>func applySeccompInfo(info *SysInfo) []string { <ide> var warnings []string <ide> seccompOnce.Do(func() { <ide> // Check if Seccomp is supported, via CONFIG_SECCOMP.
3
Text
Text
fix minor typo in n-api.md
4f0971d3665e0a946d3799f0398b7a4cfd43bddf
<ide><path>doc/api/n-api.md <ide> NAPI_EXTERN napi_status napi_is_error(napi_env env, <ide> bool* result); <ide> ``` <ide> - `[in] env`: The environment that the API is invoked under. <del>- `[in] msg`: The `napi_value` to be checked. <add>- `[in] value`: The `napi_value` to be checked. <ide> - `[out] result`: Boolean value that is set to true if `napi_value` represents <ide> an error, false otherwise. <ide>
1
Java
Java
remove outdated comment in NativeDetector
77a8cbcbecc54744b059e41f24c362f587db21d2
<ide><path>spring-core/src/main/java/org/springframework/core/NativeDetector.java <ide> /** <ide> * A common delegate for detecting a GraalVM native image environment. <ide> * <del> * <p>Requires using the {@code -H:+InlineBeforeAnalysis} native image compiler flag in order to allow code removal at <del> * build time. <del> * <ide> * @author Sebastien Deleuze <ide> * @since 5.3.4 <ide> */
1
Ruby
Ruby
add a small DSL for setting requirement options
f8d253950fe9c711a3743266d09effb37cdc5bd3
<ide><path>Library/Homebrew/dependencies.rb <ide> def initialize(*tags) <ide> # Should return true if this requirement is met. <ide> def satisfied?; false; end <ide> # Should return true if not meeting this requirement should fail the build. <del> def fatal?; false; end <add> def fatal? <add> self.class.fatal || false <add> end <ide> # The message to show when the requirement is not met. <ide> def message; ""; end <ide> <ide> def eql?(other) <ide> def hash <ide> message.hash <ide> end <add> <add> class << self <add> def fatal(val=nil) <add> val.nil? ? @fatal : @fatal = val <add> end <add> end <ide> end <ide> <ide> require 'requirements' <ide><path>Library/Homebrew/requirements.rb <ide> # A dependency on a language-specific module. <ide> class LanguageModuleDependency < Requirement <add> fatal true <add> <ide> def initialize language, module_name, import_name=nil <ide> @language = language <ide> @module_name = module_name <ide> @import_name = import_name || module_name <ide> end <ide> <del> def fatal?; true; end <del> <ide> def satisfied? <ide> quiet_system(*the_test) <ide> end <ide> class X11Dependency < Requirement <ide> include Comparable <ide> attr_reader :min_version <ide> <add> fatal true <add> <ide> def initialize(*tags) <ide> tags.flatten! <ide> @min_version = tags.shift if /(\d\.)+\d/ === tags.first <ide> super <ide> end <ide> <del> def fatal?; true; end <del> <ide> def satisfied? <ide> MacOS::XQuartz.installed? and (@min_version.nil? or @min_version <= MacOS::XQuartz.version) <ide> end <ide> class MPIDependency < Requirement <ide> <ide> attr_reader :lang_list <ide> <add> fatal true <add> <ide> def initialize *lang_list <ide> @lang_list = lang_list <ide> @non_functional = [] <ide> @unknown_langs = [] <ide> end <ide> <del> def fatal?; true; end <del> <ide> def mpi_wrapper_works? compiler <ide> compiler = which compiler <ide> return false if compiler.nil? or not compiler.executable? <ide> def message <ide> EOS <ide> end <ide> end <del> <ide> end <ide> <ide> # This requirement added by the `conflicts_with` DSL method. <ide> class ConflictRequirement < Requirement <ide> attr_reader :formula <ide> <add> # The user can chose to force installation even in the face of conflicts. <add> fatal !ARGV.force? <add> <ide> def initialize formula, name, opts={} <ide> @formula = formula <ide> @name = name <ide> def satisfied? <ide> keg = Formula.factory(@formula).prefix <ide> not keg.exist? && Keg.new(keg).linked? <ide> end <del> <del> # The user can chose to force installation even in the face of conflicts. <del> def fatal? <del> not ARGV.force? <del> end <ide> end <ide> <ide> class XcodeDependency < Requirement <del> def fatal?; true; end <add> fatal true <ide> <ide> def satisfied? <ide> MacOS::Xcode.installed? <ide> def message; <<-EOS.undent <ide> end <ide> <ide> class MysqlInstalled < Requirement <del> def fatal?; true; end <add> fatal true <ide> <ide> def satisfied? <ide> which 'mysql_config' <ide> def message; <<-EOS.undent <ide> end <ide> <ide> class PostgresqlInstalled < Requirement <del> def fatal?; true; end <add> fatal true <ide> <ide> def satisfied? <ide> which 'pg_config'
2
PHP
PHP
use contract for email address
2b77ef07db52f634d1b488b745bb8ff22376942d
<ide><path>src/Illuminate/Auth/Notifications/VerifyEmail.php <ide> protected function verificationUrl($notifiable) <ide> return URL::temporarySignedRoute( <ide> 'verification.verify', <ide> Carbon::now()->addMinutes(Config::get('auth.verification.expire', 60)), <del> ['id' => $notifiable->getKey(), 'hash' => sha1($notifiable->email)] <add> ['id' => $notifiable->getKey(), 'hash' => hash('sha1', $notifiable->getEmailForVerification())] <ide> ); <ide> } <ide>
1
Javascript
Javascript
reduce string concatenations
8b76c3e60c7b5c274c757257580a2c0faae69097
<ide><path>test/addons/repl-domain-abort/test.js <ide> process.on('exit', function() { <ide> <ide> const lines = [ <ide> // This line shouldn't cause an assertion error. <del> 'require(\'' + buildPath + '\')' + <add> `require('${buildPath}')` + <ide> // Log output to double check callback ran. <ide> '.method(function() { console.log(\'cb_ran\'); });', <ide> ]; <ide><path>test/common/index.js <ide> exports.isLinux = process.platform === 'linux'; <ide> exports.isOSX = process.platform === 'darwin'; <ide> <ide> exports.enoughTestMem = os.totalmem() > 0x40000000; /* 1 Gb */ <del>exports.bufferMaxSizeMsg = new RegExp('^RangeError: "size" argument' + <del> ' must not be larger than ' + <del> buffer.kMaxLength + '$'); <add>exports.bufferMaxSizeMsg = new RegExp( <add> `^RangeError: "size" argument must not be larger than ${buffer.kMaxLength}$`); <ide> const cpus = os.cpus(); <ide> exports.enoughTestCpu = Array.isArray(cpus) && <ide> (cpus.length > 1 || cpus[0].speed > 999); <ide> exports.refreshTmpDir = function() { <ide> <ide> if (process.env.TEST_THREAD_ID) { <ide> exports.PORT += process.env.TEST_THREAD_ID * 100; <del> exports.tmpDirName += '.' + process.env.TEST_THREAD_ID; <add> exports.tmpDirName += `.${process.env.TEST_THREAD_ID}`; <ide> } <ide> exports.tmpDir = path.join(testRoot, exports.tmpDirName); <ide> <ide> Object.defineProperty(exports, 'hasFipsCrypto', { <ide> if (exports.isWindows) { <ide> exports.PIPE = '\\\\.\\pipe\\libuv-test'; <ide> if (process.env.TEST_THREAD_ID) { <del> exports.PIPE += '.' + process.env.TEST_THREAD_ID; <add> exports.PIPE += `.${process.env.TEST_THREAD_ID}`; <ide> } <ide> } else { <del> exports.PIPE = exports.tmpDir + '/test.sock'; <add> exports.PIPE = `${exports.tmpDir}/test.sock`; <ide> } <ide> <ide> const ifaces = os.networkInterfaces(); <ide> exports.childShouldThrowAndAbort = function() { <ide> exports.ddCommand = function(filename, kilobytes) { <ide> if (exports.isWindows) { <ide> const p = path.resolve(exports.fixturesDir, 'create-file.js'); <del> return '"' + process.argv[0] + '" "' + p + '" "' + <del> filename + '" ' + (kilobytes * 1024); <add> return `"${process.argv[0]}" "${p}" "${filename}" ${kilobytes * 1024}`; <ide> } else { <del> return 'dd if=/dev/zero of="' + filename + '" bs=1024 count=' + kilobytes; <add> return `dd if=/dev/zero of="${filename}" bs=1024 count=${kilobytes}`; <ide> } <ide> }; <ide> <ide> exports.canCreateSymLink = function() { <ide> let output = ''; <ide> <ide> try { <del> output = execSync(whoamiPath + ' /priv', { timout: 1000 }); <add> output = execSync(`${whoamiPath} /priv`, { timout: 1000 }); <ide> } catch (e) { <ide> err = true; <ide> } finally { <ide> exports.skip = function(msg) { <ide> function ArrayStream() { <ide> this.run = function(data) { <ide> data.forEach((line) => { <del> this.emit('data', line + '\n'); <add> this.emit('data', `${line}\n`); <ide> }); <ide> }; <ide> } <ide><path>test/debugger/helper-debugger-repl.js <ide> let quit; <ide> <ide> function startDebugger(scriptToDebug) { <ide> scriptToDebug = process.env.NODE_DEBUGGER_TEST_SCRIPT || <del> common.fixturesDir + '/' + scriptToDebug; <add> `${common.fixturesDir}/${scriptToDebug}`; <ide> <del> child = spawn(process.execPath, ['debug', '--port=' + port, scriptToDebug]); <add> child = spawn(process.execPath, ['debug', `--port=${port}`, scriptToDebug]); <ide> <del> console.error('./node', 'debug', '--port=' + port, scriptToDebug); <add> console.error('./node', 'debug', `--port=${port}`, scriptToDebug); <ide> <ide> child.stdout.setEncoding('utf-8'); <ide> 
child.stdout.on('data', function(data) { <ide> function startDebugger(scriptToDebug) { <ide> child.on('line', function(line) { <ide> line = line.replace(/^(debug> *)+/, ''); <ide> console.log(line); <del> assert.ok(expected.length > 0, 'Got unexpected line: ' + line); <add> assert.ok(expected.length > 0, `Got unexpected line: ${line}`); <ide> <ide> const expectedLine = expected[0].lines.shift(); <del> assert.ok(line.match(expectedLine) !== null, line + ' != ' + expectedLine); <add> assert.ok(line.match(expectedLine) !== null, `${line} != ${expectedLine}`); <ide> <ide> if (expected[0].lines.length === 0) { <ide> const callback = expected[0].callback; <ide> function startDebugger(scriptToDebug) { <ide> console.error('dying badly buffer=%j', buffer); <ide> let err = 'Timeout'; <ide> if (expected.length > 0 && expected[0].lines) { <del> err = err + '. Expected: ' + expected[0].lines.shift(); <add> err = `${err}. Expected: ${expected[0].lines.shift()}`; <ide> } <ide> <ide> child.on('close', function() { <ide> function startDebugger(scriptToDebug) { <ide> function addTest(input, output) { <ide> function next() { <ide> if (expected.length > 0) { <del> console.log('debug> ' + expected[0].input); <del> child.stdin.write(expected[0].input + '\n'); <add> console.log(`debug> ${expected[0].input}`); <add> child.stdin.write(`${expected[0].input}\n`); <ide> <ide> if (!expected[0].lines) { <ide> const callback = expected[0].callback; <ide><path>test/debugger/test-debugger-repl-utf8.js <ide> <ide> 'use strict'; <ide> const common = require('../common'); <del>const script = common.fixturesDir + '/breakpoints_utf8.js'; <add>const script = `${common.fixturesDir}/breakpoints_utf8.js`; <ide> process.env.NODE_DEBUGGER_TEST_SCRIPT = script; <ide> <ide> require('./test-debugger-repl.js'); <ide><path>test/gc/test-http-client-connaborted.js <ide> let done = 0; <ide> let count = 0; <ide> let countGC = 0; <ide> <del>console.log('We should do ' + todo + ' requests'); <add>console.log(`We should do ${todo} requests`); <ide> <ide> const server = http.createServer(serverHandler); <ide> server.listen(0, getall); <ide><path>test/gc/test-http-client-onerror.js <ide> let done = 0; <ide> let count = 0; <ide> let countGC = 0; <ide> <del>console.log('We should do ' + todo + ' requests'); <add>console.log(`We should do ${todo} requests`); <ide> <ide> const server = http.createServer(serverHandler); <ide> server.listen(0, runTest); <ide><path>test/gc/test-http-client-timeout.js <ide> let done = 0; <ide> let count = 0; <ide> let countGC = 0; <ide> <del>console.log('We should do ' + todo + ' requests'); <add>console.log(`We should do ${todo} requests`); <ide> <ide> const server = http.createServer(serverHandler); <ide> server.listen(0, getall); <ide><path>test/gc/test-http-client.js <ide> let done = 0; <ide> let count = 0; <ide> let countGC = 0; <ide> <del>console.log('We should do ' + todo + ' requests'); <add>console.log(`We should do ${todo} requests`); <ide> <ide> const server = http.createServer(serverHandler); <ide> server.listen(0, getall); <ide><path>test/gc/test-net-timeout.js <ide> let done = 0; <ide> let count = 0; <ide> let countGC = 0; <ide> <del>console.log('We should do ' + todo + ' requests'); <add>console.log(`We should do ${todo} requests`); <ide> <ide> const server = net.createServer(serverHandler); <ide> server.listen(0, getall); <ide><path>test/inspector/inspector-helper.js <ide> TestSession.prototype.processMessage_ = function(message) { <ide> assert.strictEqual(id, this.expectedId_); <ide> this.expectedId_++; 
<ide> if (this.responseCheckers_[id]) { <del> assert(message['result'], JSON.stringify(message) + ' (response to ' + <del> JSON.stringify(this.messages_[id]) + ')'); <add> const messageJSON = JSON.stringify(message); <add> const idJSON = JSON.stringify(this.messages_[id]); <add> assert(message['result'], `${messageJSON} (response to ${idJSON})`); <ide> this.responseCheckers_[id](message['result']); <ide> delete this.responseCheckers_[id]; <ide> } <del> assert(!message['error'], JSON.stringify(message) + ' (replying to ' + <del> JSON.stringify(this.messages_[id]) + ')'); <add> const messageJSON = JSON.stringify(message); <add> const idJSON = JSON.stringify(this.messages_[id]); <add> assert(!message['error'], `${messageJSON} (replying to ${idJSON})`); <ide> delete this.messages_[id]; <ide> if (id === this.lastId_) { <ide> this.lastMessageResponseCallback_ && this.lastMessageResponseCallback_(); <ide> TestSession.prototype.sendInspectorCommands = function(commands) { <ide> }; <ide> this.sendAll_(commands, () => { <ide> timeoutId = setTimeout(() => { <del> let s = ''; <del> for (const id in this.messages_) { <del> s += id + ', '; <del> } <del> assert.fail('Messages without response: ' + <del> s.substring(0, s.length - 2)); <add> assert.fail(`Messages without response: ${ <add> Object.keys(this.messages_).join(', ')}`); <ide> }, TIMEOUT); <ide> }); <ide> }); <ide> TestSession.prototype.expectMessages = function(expects) { <ide> if (!(expects instanceof Array)) expects = [ expects ]; <ide> <ide> const callback = this.createCallbackWithTimeout_( <del> 'Matching response was not received:\n' + expects[0]); <add> `Matching response was not received:\n${expects[0]}`); <ide> this.messagefilter_ = (message) => { <ide> if (expects[0](message)) <ide> expects.shift(); <ide> TestSession.prototype.expectMessages = function(expects) { <ide> TestSession.prototype.expectStderrOutput = function(regexp) { <ide> this.harness_.addStderrFilter( <ide> regexp, <del> this.createCallbackWithTimeout_('Timed out waiting for ' + regexp)); <add> this.createCallbackWithTimeout_(`Timed out waiting for ${regexp}`)); <ide> return this; <ide> }; <ide> <ide><path>test/inspector/test-inspector.js <ide> function checkVersion(err, response) { <ide> assert.ifError(err); <ide> assert.ok(response); <ide> const expected = { <del> 'Browser': 'node.js/' + process.version, <add> 'Browser': `node.js/${process.version}`, <ide> 'Protocol-Version': '1.1', <ide> }; <ide> assert.strictEqual(JSON.stringify(response), <ide> function expectMainScriptSource(result) { <ide> const expected = helper.mainScriptSource(); <ide> const source = result['scriptSource']; <ide> assert(source && (source.includes(expected)), <del> 'Script source is wrong: ' + source); <add> `Script source is wrong: ${source}`); <ide> } <ide> <ide> function setupExpectBreakOnLine(line, url, session, scopeIdCallback) { <ide> function testI18NCharacters(session) { <ide> { <ide> 'method': 'Debugger.evaluateOnCallFrame', 'params': { <ide> 'callFrameId': '{"ordinal":0,"injectedScriptId":1}', <del> 'expression': 'console.log("' + chars + '")', <add> 'expression': `console.log("${chars}")`, <ide> 'objectGroup': 'console', <ide> 'includeCommandLineAPI': true, <ide> 'silent': false, <ide><path>test/internet/test-dns-cares-domains.js <ide> methods.forEach(function(method) { <ide> const d = domain.create(); <ide> d.run(function() { <ide> dns[method]('google.com', function() { <del> assert.strictEqual(process.domain, d, method + ' retains domain'); <add> assert.strictEqual(process.domain, 
d, `${method} retains domain`); <ide> }); <ide> }); <ide> }); <ide><path>test/internet/test-dns-ipv6.js <ide> TEST(function test_lookup_all_ipv6(done) { <ide> <ide> ips.forEach((ip) => { <ide> assert.ok(isIPv6(ip.address), <del> 'Invalid IPv6: ' + ip.address.toString()); <add> `Invalid IPv6: ${ip.address.toString()}`); <ide> assert.strictEqual(ip.family, 6); <ide> }); <ide> <ide><path>test/internet/test-dns.js <ide> req.oncomplete = function(err, domains) { <ide> }; <ide> <ide> process.on('exit', function() { <del> console.log(completed + ' tests completed'); <add> console.log(`${completed} tests completed`); <ide> assert.strictEqual(running, false); <ide> assert.strictEqual(expected, completed); <ide> assert.ok(getaddrinfoCallbackCalled); <ide><path>test/internet/test-tls-add-ca-cert.js <ide> const fs = require('fs'); <ide> const tls = require('tls'); <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/known_issues/test-cwd-enoent-file.js <ide> if (process.argv[2] === 'child') { <ide> // Do nothing. <ide> } else { <ide> common.refreshTmpDir(); <del> const dir = fs.mkdtempSync(common.tmpDir + '/'); <add> const dir = fs.mkdtempSync(`${common.tmpDir}/`); <ide> process.chdir(dir); <ide> fs.rmdirSync(dir); <ide> assert.throws(process.cwd, <ide><path>test/parallel/test-assert.js <ide> assert.doesNotThrow(makeBlock(a.deepEqual, a1, a2)); <ide> <ide> // having an identical prototype property <ide> const nbRoot = { <del> toString: function() { return this.first + ' ' + this.last; } <add> toString: function() { return `${this.first} ${this.last}`; } <ide> }; <ide> <ide> function nameBuilder(first, last) { <ide><path>test/parallel/test-async-wrap-check-providers.js <ide> process.on('SIGINT', () => process.exit()); <ide> // Run from closed net server above. 
<ide> function checkTLS() { <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/ec-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/ec-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/ec-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/ec-cert.pem`) <ide> }; <ide> const server = tls.createServer(options, common.noop) <ide> .listen(0, function() { <ide><path>test/parallel/test-buffer-badhex.js <ide> const Buffer = require('buffer').Buffer; <ide> const hex = buf.toString('hex'); <ide> assert.deepStrictEqual(Buffer.from(hex, 'hex'), buf); <ide> <del> const badHex = hex.slice(0, 256) + 'xx' + hex.slice(256, 510); <add> const badHex = `${hex.slice(0, 256)}xx${hex.slice(256, 510)}`; <ide> assert.deepStrictEqual(Buffer.from(badHex, 'hex'), buf.slice(0, 128)); <ide> } <ide><path>test/parallel/test-buffer-includes.js <ide> const longBufferString = Buffer.from(longString); <ide> let pattern = 'ABACABADABACABA'; <ide> for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { <ide> const includes = longBufferString.includes(pattern, i); <del> assert(includes, 'Long ABACABA...-string at index ' + i); <add> assert(includes, `Long ABACABA...-string at index ${i}`); <ide> } <ide> assert(longBufferString.includes('AJABACA'), 'Long AJABACA, First J'); <ide> assert(longBufferString.includes('AJABACA', 511), 'Long AJABACA, Second J'); <ide><path>test/parallel/test-buffer-indexof.js <ide> let pattern = 'ABACABADABACABA'; <ide> for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { <ide> const index = longBufferString.indexOf(pattern, i); <ide> assert.strictEqual((i + 15) & ~0xf, index, <del> 'Long ABACABA...-string at index ' + i); <add> `Long ABACABA...-string at index ${i}`); <ide> } <ide> assert.strictEqual(510, longBufferString.indexOf('AJABACA'), <ide> 'Long AJABACA, First J'); <ide><path>test/parallel/test-child-process-buffering.js <ide> function pwd(callback) { <ide> <ide> child.stdout.setEncoding('utf8'); <ide> child.stdout.on('data', function(s) { <del> console.log('stdout: ' + JSON.stringify(s)); <add> console.log(`stdout: ${JSON.stringify(s)}`); <ide> output += s; <ide> }); <ide> <ide> child.on('exit', common.mustCall(function(c) { <del> console.log('exit: ' + c); <add> console.log(`exit: ${c}`); <ide> assert.strictEqual(0, c); <ide> })); <ide> <ide><path>test/parallel/test-child-process-default-options.js <ide> let response = ''; <ide> child.stdout.setEncoding('utf8'); <ide> <ide> child.stdout.on('data', function(chunk) { <del> console.log('stdout: ' + chunk); <add> console.log(`stdout: ${chunk}`); <ide> response += chunk; <ide> }); <ide> <ide><path>test/parallel/test-child-process-double-pipe.js <ide> if (common.isWindows) { <ide> <ide> // pipe echo | grep <ide> echo.stdout.on('data', function(data) { <del> console.error('grep stdin write ' + data.length); <add> console.error(`grep stdin write ${data.length}`); <ide> if (!grep.stdin.write(data)) { <ide> echo.stdout.pause(); <ide> } <ide> sed.on('exit', function() { <ide> <ide> // pipe grep | sed <ide> grep.stdout.on('data', function(data) { <del> console.error('grep stdout ' + data.length); <add> console.error(`grep stdout ${data.length}`); <ide> if (!sed.stdin.write(data)) { <ide> grep.stdout.pause(); <ide> } <ide><path>test/parallel/test-child-process-env.js <ide> let response = ''; <ide> child.stdout.setEncoding('utf8'); <ide> <ide> child.stdout.on('data', function(chunk) { <del> console.log('stdout: ' + chunk); <add> 
console.log(`stdout: ${chunk}`); <ide> response += chunk; <ide> }); <ide> <ide><path>test/parallel/test-child-process-exec-env.js <ide> let child; <ide> function after(err, stdout, stderr) { <ide> if (err) { <ide> error_count++; <del> console.log('error!: ' + err.code); <del> console.log('stdout: ' + JSON.stringify(stdout)); <del> console.log('stderr: ' + JSON.stringify(stderr)); <add> console.log(`error!: ${err.code}`); <add> console.log(`stdout: ${JSON.stringify(stdout)}`); <add> console.log(`stderr: ${JSON.stringify(stderr)}`); <ide> assert.strictEqual(false, err.killed); <ide> } else { <ide> success_count++; <ide><path>test/parallel/test-child-process-fork-close.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const fork = require('child_process').fork; <ide> <del>const cp = fork(common.fixturesDir + '/child-process-message-and-exit.js'); <add>const cp = fork(`${common.fixturesDir}/child-process-message-and-exit.js`); <ide> <ide> let gotMessage = false; <ide> let gotExit = false; <ide><path>test/parallel/test-child-process-fork.js <ide> const assert = require('assert'); <ide> const fork = require('child_process').fork; <ide> const args = ['foo', 'bar']; <ide> <del>const n = fork(common.fixturesDir + '/child-process-spawn-node.js', args); <add>const n = fork(`${common.fixturesDir}/child-process-spawn-node.js`, args); <ide> <ide> assert.strictEqual(n.channel, n._channel); <ide> assert.deepStrictEqual(args, ['foo', 'bar']); <ide><path>test/parallel/test-child-process-fork3.js <ide> const common = require('../common'); <ide> const child_process = require('child_process'); <ide> <del>child_process.fork(common.fixturesDir + '/empty.js'); // should not hang <add>child_process.fork(`${common.fixturesDir}/empty.js`); // should not hang <ide><path>test/parallel/test-child-process-internal.js <ide> const assert = require('assert'); <ide> <ide> //messages <ide> const PREFIX = 'NODE_'; <del>const normal = {cmd: 'foo' + PREFIX}; <del>const internal = {cmd: PREFIX + 'bar'}; <add>const normal = {cmd: `foo${PREFIX}`}; <add>const internal = {cmd: `${PREFIX}bar`}; <ide> <ide> if (process.argv[2] === 'child') { <ide> //send non-internal message containing PREFIX at a non prefix position <ide><path>test/parallel/test-child-process-ipc.js <ide> let gotEcho = false; <ide> const child = spawn(process.argv[0], [sub]); <ide> <ide> child.stderr.on('data', function(data) { <del> console.log('parent stderr: ' + data); <add> console.log(`parent stderr: ${data}`); <ide> }); <ide> <ide> child.stdout.setEncoding('utf8'); <ide> <ide> child.stdout.on('data', function(data) { <del> console.log('child said: ' + JSON.stringify(data)); <add> console.log(`child said: ${JSON.stringify(data)}`); <ide> if (!gotHelloWorld) { <ide> console.error('testing for hello world'); <ide> assert.strictEqual('hello world\r\n', data); <ide><path>test/parallel/test-child-process-set-blocking.js <ide> const ch = require('child_process'); <ide> <ide> const SIZE = 100000; <ide> <del>const cp = ch.spawn('python', ['-c', 'print ' + SIZE + ' * "C"'], { <add>const cp = ch.spawn('python', ['-c', `print ${SIZE} * "C"`], { <ide> stdio: 'inherit' <ide> }); <ide> <ide><path>test/parallel/test-child-process-spawn-error.js <ide> const enoentChild = spawn(enoentPath, spawnargs); <ide> enoentChild.on('error', common.mustCall(function(err) { <ide> assert.strictEqual(err.code, 'ENOENT'); <ide> assert.strictEqual(err.errno, 'ENOENT'); <del> assert.strictEqual(err.syscall, 'spawn ' + enoentPath); <add> 
assert.strictEqual(err.syscall, `spawn ${enoentPath}`); <ide> assert.strictEqual(err.path, enoentPath); <ide> assert.deepStrictEqual(err.spawnargs, spawnargs); <ide> })); <ide><path>test/parallel/test-child-process-spawn-typeerror.js <ide> const invalidArgsMsg = /Incorrect value of args option/; <ide> const invalidOptionsMsg = /"options" argument must be an object/; <ide> const invalidFileMsg = <ide> /^TypeError: "file" argument must be a non-empty string$/; <del>const empty = common.fixturesDir + '/empty.js'; <add>const empty = `${common.fixturesDir}/empty.js`; <ide> <ide> assert.throws(function() { <ide> const child = spawn(invalidcmd, 'this is not an array'); <ide><path>test/parallel/test-child-process-spawnsync-input.js <ide> const msgOut = 'this is stdout'; <ide> const msgErr = 'this is stderr'; <ide> <ide> // this is actually not os.EOL? <del>const msgOutBuf = Buffer.from(msgOut + '\n'); <del>const msgErrBuf = Buffer.from(msgErr + '\n'); <add>const msgOutBuf = Buffer.from(`${msgOut}\n`); <add>const msgErrBuf = Buffer.from(`${msgErr}\n`); <ide> <ide> const args = [ <ide> '-e', <ide> verifyBufOutput(spawnSync(process.execPath, args)); <ide> ret = spawnSync(process.execPath, args, { encoding: 'utf8' }); <ide> <ide> checkSpawnSyncRet(ret); <del>assert.strictEqual(ret.stdout, msgOut + '\n'); <del>assert.strictEqual(ret.stderr, msgErr + '\n'); <add>assert.strictEqual(ret.stdout, `${msgOut}\n`); <add>assert.strictEqual(ret.stderr, `${msgErr}\n`); <ide><path>test/parallel/test-child-process-spawnsync-maxbuf.js <ide> const spawnSync = require('child_process').spawnSync; <ide> const msgOut = 'this is stdout'; <ide> <ide> // This is actually not os.EOL? <del>const msgOutBuf = Buffer.from(msgOut + '\n'); <add>const msgOutBuf = Buffer.from(`${msgOut}\n`); <ide> <ide> const args = [ <ide> '-e', <ide><path>test/parallel/test-child-process-stdin.js <ide> let response = ''; <ide> <ide> cat.stdout.setEncoding('utf8'); <ide> cat.stdout.on('data', function(chunk) { <del> console.log('stdout: ' + chunk); <add> console.log(`stdout: ${chunk}`); <ide> response += chunk; <ide> }); <ide> <ide><path>test/parallel/test-cli-eval.js <ide> child.exec(`${nodejs} --use-strict -p process.execArgv`, <ide> const opt = ' --eval "console.log(process.argv.slice(1).join(\' \'))"'; <ide> const cmd = `${nodejs}${opt} -- ${args}`; <ide> child.exec(cmd, common.mustCall(function(err, stdout, stderr) { <del> assert.strictEqual(stdout, args + '\n'); <add> assert.strictEqual(stdout, `${args}\n`); <ide> assert.strictEqual(stderr, ''); <ide> assert.strictEqual(err, null); <ide> })); <ide> child.exec(`${nodejs} --use-strict -p process.execArgv`, <ide> const popt = ' --print "process.argv.slice(1).join(\' \')"'; <ide> const pcmd = `${nodejs}${popt} -- ${args}`; <ide> child.exec(pcmd, common.mustCall(function(err, stdout, stderr) { <del> assert.strictEqual(stdout, args + '\n'); <add> assert.strictEqual(stdout, `${args}\n`); <ide> assert.strictEqual(stderr, ''); <ide> assert.strictEqual(err, null); <ide> })); <ide> child.exec(`${nodejs} --use-strict -p process.execArgv`, <ide> // filename. 
<ide> const filecmd = `${nodejs} -- ${__filename} ${args}`; <ide> child.exec(filecmd, common.mustCall(function(err, stdout, stderr) { <del> assert.strictEqual(stdout, args + '\n'); <add> assert.strictEqual(stdout, `${args}\n`); <ide> assert.strictEqual(stderr, ''); <ide> assert.strictEqual(err, null); <ide> })); <ide><path>test/parallel/test-cli-node-options.js <ide> function disallow(opt) { <ide> const options = {env: {NODE_OPTIONS: opt}}; <ide> exec(process.execPath, options, common.mustCall(function(err) { <ide> const message = err.message.split(/\r?\n/)[1]; <del> const expect = process.execPath + ': ' + opt + <del> ' is not allowed in NODE_OPTIONS'; <add> const expect = `${process.execPath}: ${opt} is not allowed in NODE_OPTIONS`; <ide> <ide> assert.strictEqual(err.code, 9); <ide> assert.strictEqual(message, expect); <ide> function disallow(opt) { <ide> <ide> const printA = require.resolve('../fixtures/printA.js'); <ide> <del>expect('-r ' + printA, 'A\nB\n'); <add>expect(`-r ${printA}`, 'A\nB\n'); <ide> expect('--no-deprecation', 'B\n'); <ide> expect('--no-warnings', 'B\n'); <ide> expect('--trace-warnings', 'B\n'); <ide> function expect(opt, want) { <ide> if (!RegExp(want).test(stdout)) { <ide> console.error('For %j, failed to find %j in: <\n%s\n>', <ide> opt, expect, stdout); <del> assert(false, 'Expected ' + expect); <add> assert(false, `Expected ${expect}`); <ide> } <ide> })); <ide> } <ide><path>test/parallel/test-cli-syntax.js <ide> const syntaxArgs = [ <ide> // no output should be produced <ide> assert.strictEqual(c.stdout, '', 'stdout produced'); <ide> assert.strictEqual(c.stderr, '', 'stderr produced'); <del> assert.strictEqual(c.status, 0, 'code === ' + c.status); <add> assert.strictEqual(c.status, 0, `code === ${c.status}`); <ide> }); <ide> }); <ide> <ide> const syntaxArgs = [ <ide> const match = c.stderr.match(/^SyntaxError: Unexpected identifier$/m); <ide> assert(match, 'stderr incorrect'); <ide> <del> assert.strictEqual(c.status, 1, 'code === ' + c.status); <add> assert.strictEqual(c.status, 1, `code === ${c.status}`); <ide> }); <ide> }); <ide> <ide> const syntaxArgs = [ <ide> const match = c.stderr.match(/^Error: Cannot find module/m); <ide> assert(match, 'stderr incorrect'); <ide> <del> assert.strictEqual(c.status, 1, 'code === ' + c.status); <add> assert.strictEqual(c.status, 1, `code === ${c.status}`); <ide> }); <ide> }); <ide> <ide> syntaxArgs.forEach(function(args) { <ide> assert.strictEqual(c.stdout, '', 'stdout produced'); <ide> assert.strictEqual(c.stderr, '', 'stderr produced'); <ide> <del> assert.strictEqual(c.status, 0, 'code === ' + c.status); <add> assert.strictEqual(c.status, 0, `code === ${c.status}`); <ide> }); <ide> <ide> // should throw if code piped from stdin with --check has bad syntax <ide> syntaxArgs.forEach(function(args) { <ide> const match = c.stderr.match(/^SyntaxError: Unexpected identifier$/m); <ide> assert(match, 'stderr incorrect'); <ide> <del> assert.strictEqual(c.status, 1, 'code === ' + c.status); <add> assert.strictEqual(c.status, 1, `code === ${c.status}`); <ide> }); <ide> <ide> // should throw if -c and -e flags are both passed <ide> syntaxArgs.forEach(function(args) { <ide> ) <ide> ); <ide> <del> assert.strictEqual(c.status, 9, 'code === ' + c.status); <add> assert.strictEqual(c.status, 9, `code === ${c.status}`); <ide> }); <ide> }); <ide><path>test/parallel/test-cluster-bind-twice.js <ide> if (!id) { <ide> a.on('exit', common.mustCall((c) => { <ide> if (c) { <ide> b.send('QUIT'); <del> throw new Error('A exited with ' + c); <add> 
throw new Error(`A exited with ${c}`); <ide> } <ide> })); <ide> <ide> b.on('exit', common.mustCall((c) => { <ide> if (c) { <ide> a.send('QUIT'); <del> throw new Error('B exited with ' + c); <add> throw new Error(`B exited with ${c}`); <ide> } <ide> })); <ide> <ide><path>test/parallel/test-cluster-eaccess.js <ide> if (cluster.isMaster) { <ide> <ide> } else { <ide> common.refreshTmpDir(); <del> const cp = fork(common.fixturesDir + '/listen-on-socket-and-exit.js', <add> const cp = fork(`${common.fixturesDir}/listen-on-socket-and-exit.js`, <ide> { stdio: 'inherit' }); <ide> <ide> // message from the child indicates it's ready and listening <ide><path>test/parallel/test-cluster-eaddrinuse.js <ide> const assert = require('assert'); <ide> const fork = require('child_process').fork; <ide> const net = require('net'); <ide> <del>const id = '' + process.argv[2]; <del>const port = '' + process.argv[3]; <add>const id = String(process.argv[2]); <add>const port = String(process.argv[3]); <ide> <ide> if (id === 'undefined') { <ide> const server = net.createServer(common.mustNotCall()); <ide><path>test/parallel/test-cluster-message.js <ide> if (cluster.isWorker) { <ide> if (data.code === 'received message') { <ide> check('worker', data.echo === 'message from master'); <ide> } else { <del> throw new Error('wrong TCP message received: ' + data); <add> throw new Error(`wrong TCP message received: ${data}`); <ide> } <ide> }); <ide> <ide> if (cluster.isWorker) { <ide> <ide> process.once('exit', function() { <ide> forEach(checks, function(check, type) { <del> assert.ok(check.receive, 'The ' + type + ' did not receive any message'); <del> assert.ok(check.correct, <del> 'The ' + type + ' did not get the correct message'); <add> assert.ok(check.receive, `The ${type} did not receive any message`); <add> assert.ok(check.correct, `The ${type} did not get the correct message`); <ide> }); <ide> }); <ide> } <ide><path>test/parallel/test-cluster-worker-exit.js <ide> function checkResults(expected_results, results) { <ide> const actual = results[k]; <ide> const expected = expected_results[k]; <ide> <del> assert.strictEqual(actual, <del> expected && expected.length ? expected[0] : expected, <del> (expected[1] || '') + <del> ` [expected: ${expected[0]} / actual: ${actual}]`); <add> assert.strictEqual( <add> actual, expected && expected.length ? expected[0] : expected, <add> `${expected[1] || ''} [expected: ${expected[0]} / actual: ${actual}]`); <ide> } <ide> } <ide><path>test/parallel/test-cluster-worker-kill.js <ide> function checkResults(expected_results, results) { <ide> const actual = results[k]; <ide> const expected = expected_results[k]; <ide> <del> assert.strictEqual(actual, <del> expected && expected.length ? expected[0] : expected, <del> (expected[1] || '') + <del> ` [expected: ${expected[0]} / actual: ${actual}]`); <add> assert.strictEqual( <add> actual, expected && expected.length ? 
expected[0] : expected, <add> `${expected[1] || ''} [expected: ${expected[0]} / actual: ${actual}]`); <ide> } <ide> } <ide><path>test/parallel/test-console.js <ide> const expectedStrings = [ <ide> ]; <ide> <ide> for (const expected of expectedStrings) { <del> assert.strictEqual(expected + '\n', strings.shift()); <del> assert.strictEqual(expected + '\n', errStrings.shift()); <add> assert.strictEqual(`${expected}\n`, strings.shift()); <add> assert.strictEqual(`${expected}\n`, errStrings.shift()); <ide> } <ide> <ide> for (const expected of expectedStrings) { <del> assert.strictEqual(expected + '\n', strings.shift()); <del> assert.strictEqual(expected + '\n', errStrings.shift()); <add> assert.strictEqual(`${expected}\n`, strings.shift()); <add> assert.strictEqual(`${expected}\n`, errStrings.shift()); <ide> } <ide> <ide> assert.strictEqual("{ foo: 'bar', inspect: [Function: inspect] }\n", <ide><path>test/parallel/test-crypto-authenticated.js <ide> for (const i in TEST_CASES) { <ide> const test = TEST_CASES[i]; <ide> <ide> if (!ciphers.includes(test.algo)) { <del> common.skip('unsupported ' + test.algo + ' test'); <add> common.skip(`unsupported ${test.algo} test`); <ide> continue; <ide> } <ide> <ide><path>test/parallel/test-crypto-binary-default.js <ide> const fs = require('fs'); <ide> const path = require('path'); <ide> const tls = require('tls'); <ide> const DH_NOT_SUITABLE_GENERATOR = crypto.constants.DH_NOT_SUITABLE_GENERATOR; <add>const fixtDir = common.fixturesDir; <ide> <ide> crypto.DEFAULT_ENCODING = 'latin1'; <ide> <ide> // Test Certificates <del>const certPem = fs.readFileSync(common.fixturesDir + '/test_cert.pem', 'ascii'); <del>const certPfx = fs.readFileSync(common.fixturesDir + '/test_cert.pfx'); <del>const keyPem = fs.readFileSync(common.fixturesDir + '/test_key.pem', 'ascii'); <del>const rsaPubPem = fs.readFileSync(common.fixturesDir + '/test_rsa_pubkey.pem', <del> 'ascii'); <del>const rsaKeyPem = fs.readFileSync(common.fixturesDir + '/test_rsa_privkey.pem', <del> 'ascii'); <add>const certPem = fs.readFileSync(`${fixtDir}/test_cert.pem`, 'ascii'); <add>const certPfx = fs.readFileSync(`${fixtDir}/test_cert.pfx`); <add>const keyPem = fs.readFileSync(`${fixtDir}/test_key.pem`, 'ascii'); <add>const rsaPubPem = fs.readFileSync(`${fixtDir}/test_rsa_pubkey.pem`, 'ascii'); <add>const rsaKeyPem = fs.readFileSync(`${fixtDir}/test_rsa_privkey.pem`, 'ascii'); <ide> <ide> // PFX tests <ide> assert.doesNotThrow(function() { <ide> const h2 = crypto.createHash('sha1').update('Test').update('123').digest('hex'); <ide> assert.strictEqual(h1, h2, 'multipled updates'); <ide> <ide> // Test hashing for binary files <del>const fn = path.join(common.fixturesDir, 'sample.png'); <add>const fn = path.join(fixtDir, 'sample.png'); <ide> const sha1Hash = crypto.createHash('sha1'); <ide> const fileStream = fs.createReadStream(fn); <ide> fileStream.on('data', function(data) { <ide> assert.strictEqual(rsaVerify.verify(rsaPubPem, rsaSignature, 'hex'), true); <ide> // Test RSA signing and verification <ide> // <ide> { <del> const privateKey = fs.readFileSync( <del> common.fixturesDir + '/test_rsa_privkey_2.pem'); <add> const privateKey = fs.readFileSync(`${fixtDir}/test_rsa_privkey_2.pem`); <ide> <del> const publicKey = fs.readFileSync( <del> common.fixturesDir + '/test_rsa_pubkey_2.pem'); <add> const publicKey = fs.readFileSync(`${fixtDir}/test_rsa_pubkey_2.pem`); <ide> <ide> const input = 'I AM THE WALRUS'; <ide> <ide> assert.strictEqual(rsaVerify.verify(rsaPubPem, rsaSignature, 'hex'), true); <ide> // Test DSA 
signing and verification <ide> // <ide> { <del> const privateKey = fs.readFileSync( <del> common.fixturesDir + '/test_dsa_privkey.pem'); <add> const privateKey = fs.readFileSync(`${fixtDir}/test_dsa_privkey.pem`); <ide> <del> const publicKey = fs.readFileSync( <del> common.fixturesDir + '/test_dsa_pubkey.pem'); <add> const publicKey = fs.readFileSync(`${fixtDir}/test_dsa_pubkey.pem`); <ide> <ide> const input = 'I AM THE WALRUS'; <ide> <ide><path>test/parallel/test-crypto-certificate.js <ide> crypto.DEFAULT_ENCODING = 'buffer'; <ide> const fs = require('fs'); <ide> <ide> // Test Certificates <del>const spkacValid = fs.readFileSync(common.fixturesDir + '/spkac.valid'); <del>const spkacFail = fs.readFileSync(common.fixturesDir + '/spkac.fail'); <del>const spkacPem = fs.readFileSync(common.fixturesDir + '/spkac.pem'); <add>const spkacValid = fs.readFileSync(`${common.fixturesDir}/spkac.valid`); <add>const spkacFail = fs.readFileSync(`${common.fixturesDir}/spkac.fail`); <add>const spkacPem = fs.readFileSync(`${common.fixturesDir}/spkac.pem`); <ide> <ide> const certificate = new crypto.Certificate(); <ide> <ide><path>test/parallel/test-crypto-fips.js <ide> function addToEnv(newVar, value) { <ide> } <ide> <ide> function testHelper(stream, args, expectedOutput, cmd, env) { <del> const fullArgs = args.concat(['-e', 'console.log(' + cmd + ')']); <add> const fullArgs = args.concat(['-e', `console.log(${cmd})`]); <ide> const child = spawnSync(process.execPath, fullArgs, { <ide> cwd: path.dirname(process.execPath), <ide> env: env <ide> }); <ide> <del> console.error('Spawned child [pid:' + child.pid + '] with cmd \'' + <del> cmd + '\' expect %j with args \'' + args + '\'' + <del> ' OPENSSL_CONF=%j', expectedOutput, env.OPENSSL_CONF); <add> console.error( <add> `Spawned child [pid:${child.pid}] with cmd '${cmd}' expect %j with args '${ <add> args}' OPENSSL_CONF=%j`, expectedOutput, env.OPENSSL_CONF); <ide> <ide> function childOk(child) { <del> console.error('Child #' + ++num_children_ok + <del> ' [pid:' + child.pid + '] OK.'); <add> console.error(`Child #${++num_children_ok} [pid:${child.pid}] OK.`); <ide> } <ide> <ide> function responseHandler(buffer, expectedOutput) { <ide><path>test/parallel/test-crypto-rsa-dsa.js <ide> if (!common.hasCrypto) { <ide> const constants = require('crypto').constants; <ide> const crypto = require('crypto'); <ide> <add>const fixtDir = common.fixturesDir; <add> <ide> // Test certificates <del>const certPem = fs.readFileSync(common.fixturesDir + '/test_cert.pem', 'ascii'); <del>const keyPem = fs.readFileSync(common.fixturesDir + '/test_key.pem', 'ascii'); <del>const rsaPubPem = fs.readFileSync(common.fixturesDir + '/test_rsa_pubkey.pem', <del> 'ascii'); <del>const rsaKeyPem = fs.readFileSync(common.fixturesDir + '/test_rsa_privkey.pem', <del> 'ascii'); <add>const certPem = fs.readFileSync(`${fixtDir}/test_cert.pem`, 'ascii'); <add>const keyPem = fs.readFileSync(`${fixtDir}/test_key.pem`, 'ascii'); <add>const rsaPubPem = fs.readFileSync(`${fixtDir}/test_rsa_pubkey.pem`, 'ascii'); <add>const rsaKeyPem = fs.readFileSync(`${fixtDir}/test_rsa_privkey.pem`, 'ascii'); <ide> const rsaKeyPemEncrypted = fs.readFileSync( <del> common.fixturesDir + '/test_rsa_privkey_encrypted.pem', 'ascii'); <del>const dsaPubPem = fs.readFileSync(common.fixturesDir + '/test_dsa_pubkey.pem', <del> 'ascii'); <del>const dsaKeyPem = fs.readFileSync(common.fixturesDir + '/test_dsa_privkey.pem', <del> 'ascii'); <add> `${fixtDir}/test_rsa_privkey_encrypted.pem`, 'ascii'); <add>const dsaPubPem = 
fs.readFileSync(`${fixtDir}/test_dsa_pubkey.pem`, 'ascii'); <add>const dsaKeyPem = fs.readFileSync(`${fixtDir}/test_dsa_privkey.pem`, 'ascii'); <ide> const dsaKeyPemEncrypted = fs.readFileSync( <del> common.fixturesDir + '/test_dsa_privkey_encrypted.pem', 'ascii'); <add> `${fixtDir}/test_dsa_privkey_encrypted.pem`, 'ascii'); <ide> <ide> const decryptError = <ide> /^Error: error:06065064:digital envelope routines:EVP_DecryptFinal_ex:bad decrypt$/; <ide> assert.throws(() => { <ide> // Test RSA signing and verification <ide> // <ide> { <del> const privateKey = fs.readFileSync( <del> common.fixturesDir + '/test_rsa_privkey_2.pem'); <add> const privateKey = fs.readFileSync(`${fixtDir}/test_rsa_privkey_2.pem`); <ide> <del> const publicKey = fs.readFileSync( <del> common.fixturesDir + '/test_rsa_pubkey_2.pem'); <add> const publicKey = fs.readFileSync(`${fixtDir}/test_rsa_pubkey_2.pem`); <ide> <ide> const input = 'I AM THE WALRUS'; <ide> <ide><path>test/parallel/test-crypto-sign-verify.js <ide> if (!common.hasCrypto) { <ide> const crypto = require('crypto'); <ide> <ide> // Test certificates <del>const certPem = fs.readFileSync(common.fixturesDir + '/test_cert.pem', 'ascii'); <del>const keyPem = fs.readFileSync(common.fixturesDir + '/test_key.pem', 'ascii'); <add>const certPem = fs.readFileSync(`${common.fixturesDir}/test_cert.pem`, 'ascii'); <add>const keyPem = fs.readFileSync(`${common.fixturesDir}/test_key.pem`, 'ascii'); <ide> const modSize = 1024; <ide> <ide> // Test signing and verifying <ide> const modSize = 1024; <ide> const msgfile = path.join(common.tmpDir, 's5.msg'); <ide> fs.writeFileSync(msgfile, msg); <ide> <del> const cmd = '"' + common.opensslCli + '" dgst -sha256 -verify "' + pubfile + <del> '" -signature "' + sigfile + <del> '" -sigopt rsa_padding_mode:pss -sigopt rsa_pss_saltlen:-2 "' + <del> msgfile + '"'; <add> const cmd = <add> `"${common.opensslCli}" dgst -sha256 -verify "${pubfile}" -signature "${ <add> sigfile}" -sigopt rsa_padding_mode:pss -sigopt rsa_pss_saltlen:-2 "${ <add> msgfile}"`; <ide> <ide> exec(cmd, common.mustCall((err, stdout, stderr) => { <ide> assert(stdout.includes('Verified OK')); <ide><path>test/parallel/test-crypto-verify-failure.js <ide> crypto.DEFAULT_ENCODING = 'buffer'; <ide> <ide> const fs = require('fs'); <ide> <del>const certPem = fs.readFileSync(common.fixturesDir + '/test_cert.pem', 'ascii'); <add>const certPem = fs.readFileSync(`${common.fixturesDir}/test_cert.pem`, 'ascii'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = tls.Server(options, (socket) => { <ide><path>test/parallel/test-crypto.js <ide> const tls = require('tls'); <ide> crypto.DEFAULT_ENCODING = 'buffer'; <ide> <ide> // Test Certificates <del>const caPem = fs.readFileSync(common.fixturesDir + '/test_ca.pem', 'ascii'); <del>const certPem = fs.readFileSync(common.fixturesDir + '/test_cert.pem', 'ascii'); <del>const certPfx = fs.readFileSync(common.fixturesDir + '/test_cert.pfx'); <del>const keyPem = fs.readFileSync(common.fixturesDir + '/test_key.pem', 'ascii'); <add>const caPem = fs.readFileSync(`${common.fixturesDir}/test_ca.pem`, 'ascii'); <add>const certPem = fs.readFileSync(`${common.fixturesDir}/test_cert.pem`, 'ascii'); <add>const certPfx = 
fs.readFileSync(`${common.fixturesDir}/test_cert.pfx`); <add>const keyPem = fs.readFileSync(`${common.fixturesDir}/test_key.pem`, 'ascii'); <ide> <ide> // 'this' safety <ide> // https://github.com/joyent/node/issues/6690 <ide> assert.throws(function() { <ide> // $ openssl pkcs8 -topk8 -inform PEM -outform PEM -in mykey.pem \ <ide> // -out private_key.pem -nocrypt; <ide> // Then open private_key.pem and change its header and footer. <del> const sha1_privateKey = fs.readFileSync(common.fixturesDir + <del> '/test_bad_rsa_privkey.pem', 'ascii'); <add> const sha1_privateKey = fs.readFileSync( <add> `${common.fixturesDir}/test_bad_rsa_privkey.pem`, 'ascii'); <ide> // this would inject errors onto OpenSSL's error stack <ide> crypto.createSign('sha1').sign(sha1_privateKey); <ide> }, /asn1 encoding routines:ASN1_CHECK_TLEN:wrong tag/); <ide><path>test/parallel/test-cwd-enoent-preload.js <ide> if (common.isSunOS || common.isWindows || common.isAix) { <ide> return; <ide> } <ide> <del>const dirname = common.tmpDir + '/cwd-does-not-exist-' + process.pid; <add>const dirname = `${common.tmpDir}/cwd-does-not-exist-${process.pid}`; <ide> const abspathFile = require('path').join(common.fixturesDir, 'a.js'); <ide> common.refreshTmpDir(); <ide> fs.mkdirSync(dirname); <ide><path>test/parallel/test-cwd-enoent-repl.js <ide> if (common.isSunOS || common.isWindows || common.isAix) { <ide> return; <ide> } <ide> <del>const dirname = common.tmpDir + '/cwd-does-not-exist-' + process.pid; <add>const dirname = `${common.tmpDir}/cwd-does-not-exist-${process.pid}`; <ide> common.refreshTmpDir(); <ide> fs.mkdirSync(dirname); <ide> process.chdir(dirname); <ide><path>test/parallel/test-cwd-enoent.js <ide> if (common.isSunOS || common.isWindows || common.isAix) { <ide> return; <ide> } <ide> <del>const dirname = common.tmpDir + '/cwd-does-not-exist-' + process.pid; <add>const dirname = `${common.tmpDir}/cwd-does-not-exist-${process.pid}`; <ide> common.refreshTmpDir(); <ide> fs.mkdirSync(dirname); <ide> process.chdir(dirname); <ide><path>test/parallel/test-dgram-bind-shared-ports.js <ide> if (cluster.isMaster) { <ide> <ide> socket1.on('error', (err) => { <ide> // no errors expected <del> process.send('socket1:' + err.code); <add> process.send(`socket1:${err.code}`); <ide> }); <ide> <ide> socket2.on('error', (err) => { <ide> // an error is expected on the second worker <del> process.send('socket2:' + err.code); <add> process.send(`socket2:${err.code}`); <ide> }); <ide> <ide> socket1.bind({ <ide><path>test/parallel/test-dgram-error-message-address.js <ide> socket_ipv6.on('error', common.mustCall(function(e) { <ide> const allowed = ['EADDRNOTAVAIL', 'EAFNOSUPPORT', 'EPROTONOSUPPORT']; <ide> assert.notStrictEqual(allowed.indexOf(e.code), -1); <ide> assert.strictEqual(e.port, undefined); <del> assert.strictEqual(e.message, 'bind ' + e.code + ' 111::1'); <add> assert.strictEqual(e.message, `bind ${e.code} 111::1`); <ide> assert.strictEqual(e.address, '111::1'); <ide> socket_ipv6.close(); <ide> })); <ide><path>test/parallel/test-domain-enter-exit.js <ide> c.name = 'c'; <ide> <ide> a.enter(); // push <ide> assert.deepStrictEqual(domain._stack, [a], <del> 'a not pushed: ' + names(domain._stack)); <add> `a not pushed: ${names(domain._stack)}`); <ide> <ide> b.enter(); // push <ide> assert.deepStrictEqual(domain._stack, [a, b], <del> 'b not pushed: ' + names(domain._stack)); <add> `b not pushed: ${names(domain._stack)}`); <ide> <ide> c.enter(); // push <ide> assert.deepStrictEqual(domain._stack, [a, b, c], <del> 'c not pushed: ' + 
names(domain._stack)); <add> `c not pushed: ${names(domain._stack)}`); <ide> <ide> b.exit(); // pop <ide> assert.deepStrictEqual(domain._stack, [a], <del> 'b and c not popped: ' + names(domain._stack)); <add> `b and c not popped: ${names(domain._stack)}`); <ide> <ide> b.enter(); // push <ide> assert.deepStrictEqual(domain._stack, [a, b], <del> 'b not pushed: ' + names(domain._stack)); <add> `b not pushed: ${names(domain._stack)}`); <ide><path>test/parallel/test-domain-uncaught-exception.js <ide> if (process.argv[2] === 'child') { <ide> test.expectedMessages.forEach(function(expectedMessage) { <ide> if (test.messagesReceived === undefined || <ide> test.messagesReceived.indexOf(expectedMessage) === -1) <del> assert.fail('test ' + test.fn.name + ' should have sent message: ' + <del> expectedMessage + ' but didn\'t'); <add> assert.fail(`test ${test.fn.name} should have sent message: ${ <add> expectedMessage} but didn't`); <ide> }); <ide> <ide> if (test.messagesReceived) { <ide> test.messagesReceived.forEach(function(receivedMessage) { <ide> if (test.expectedMessages.indexOf(receivedMessage) === -1) { <del> assert.fail('test ' + test.fn.name + <del> ' should not have sent message: ' + receivedMessage + <del> ' but did'); <add> assert.fail(`test ${test.fn.name} should not have sent message: ${ <add> receivedMessage} but did`); <ide> } <ide> }); <ide> } <ide><path>test/parallel/test-domain-with-abort-on-uncaught-exception.js <ide> if (process.argv[2] === 'child') { <ide> if (options.useTryCatch) <ide> useTryCatchOpt = 'useTryCatch'; <ide> <del> cmdToExec += process.argv[0] + ' '; <del> cmdToExec += (cmdLineOption ? cmdLineOption : '') + ' '; <del> cmdToExec += process.argv[1] + ' '; <del> cmdToExec += [ <del> 'child', <del> throwInDomainErrHandlerOpt, <del> useTryCatchOpt <del> ].join(' '); <add> cmdToExec += `${process.argv[0]} ${cmdLineOption ? 
cmdLineOption : ''} ${ <add> process.argv[1]} child ${throwInDomainErrHandlerOpt} ${useTryCatchOpt}`; <ide> <ide> const child = exec(cmdToExec); <ide> <ide><path>test/parallel/test-domain.js <ide> assert.strictEqual(result, 'return value'); <ide> <ide> <ide> // check if the executed function take in count the applied parameters <del>result = d.run(function(a, b) { <del> return a + ' ' + b; <del>}, 'return', 'value'); <add>result = d.run((a, b) => `${a} ${b}`, 'return', 'value'); <ide> assert.strictEqual(result, 'return value'); <ide> <ide> <ide><path>test/parallel/test-dsa-fips-invalid-key.js <ide> const fs = require('fs'); <ide> <ide> const input = 'hello'; <ide> <del>const dsapri = fs.readFileSync(common.fixturesDir + <del> '/keys/dsa_private_1025.pem'); <add>const dsapri = fs.readFileSync( <add> `${common.fixturesDir}/keys/dsa_private_1025.pem`); <ide> const sign = crypto.createSign('DSS1'); <ide> sign.update(input); <ide> <ide><path>test/parallel/test-error-reporting.js <ide> const exec = require('child_process').exec; <ide> const path = require('path'); <ide> <ide> function errExec(script, callback) { <del> const cmd = '"' + process.argv[0] + '" "' + <del> path.join(common.fixturesDir, script) + '"'; <add> const cmd = `"${process.argv[0]}" "${path.join(common.fixturesDir, script)}"`; <ide> return exec(cmd, function(err, stdout, stderr) { <ide> // There was some error <ide> assert.ok(err); <ide><path>test/parallel/test-eval.js <ide> const cmd = [ <ide> `"${process.execPath}"`, '-e', <ide> '"console.error(process.argv)"', <ide> 'foo', 'bar'].join(' '); <del>const expected = util.format([process.execPath, 'foo', 'bar']) + '\n'; <add>const expected = `${util.format([process.execPath, 'foo', 'bar'])}\n`; <ide> exec(cmd, common.mustCall((err, stdout, stderr) => { <ide> assert.ifError(err); <ide> assert.strictEqual(stderr, expected); <ide><path>test/parallel/test-exception-handler2.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> <ide> process.on('uncaughtException', function(err) { <del> console.log('Caught exception: ' + err); <add> console.log(`Caught exception: ${err}`); <ide> }); <ide> <ide> setTimeout(common.mustCall(function() { <ide><path>test/parallel/test-file-write-stream2.js <ide> process.on('exit', function() { <ide> console.log(' Test callback events missing or out of order:'); <ide> console.log(' expected: %j', cb_expected); <ide> console.log(' occurred: %j', cb_occurred); <del> assert.strictEqual(cb_occurred, cb_expected, <del> 'events missing or out of order: "' + <del> cb_occurred + '" !== "' + cb_expected + '"'); <add> assert.strictEqual( <add> cb_occurred, cb_expected, <add> `events missing or out of order: "${cb_occurred}" !== "${cb_expected}"`); <ide> } else { <ide> console.log('ok'); <ide> } <ide> file.on('error', function(err) { <ide> <ide> <ide> for (let i = 0; i < 11; i++) { <del> const ret = file.write(i + ''); <add> const ret = file.write(String(i)); <ide> console.error('%d %j', i, ret); <ide> <ide> // return false when i hits 10 <ide><path>test/parallel/test-file-write-stream3.js <ide> process.on('exit', function() { <ide> console.log(' Test callback events missing or out of order:'); <ide> console.log(' expected: %j', cb_expected); <ide> console.log(' occurred: %j', cb_occurred); <del> assert.strictEqual(cb_occurred, cb_expected, <del> 'events missing or out of order: "' + <del> cb_occurred + '" !== "' + cb_expected + '"'); <add> assert.strictEqual( <add> cb_occurred, cb_expected, <add> `events missing or out of order: 
"${cb_occurred}" !== "${cb_expected}"`); <ide> } <ide> }); <ide> <ide><path>test/parallel/test-fs-append-file-sync.js <ide> if (!common.isWindows) { <ide> <ide> const fileData4 = fs.readFileSync(filename4); <ide> <del>assert.strictEqual(Buffer.byteLength('' + num) + currentFileData.length, <add>assert.strictEqual(Buffer.byteLength(String(num)) + currentFileData.length, <ide> fileData4.length); <ide> <ide> // test that appendFile accepts file descriptors <ide><path>test/parallel/test-fs-append-file.js <ide> fs.appendFile(filename4, n, { mode: m }, function(e) { <ide> fs.readFile(filename4, function(e, buffer) { <ide> assert.ifError(e); <ide> ncallbacks++; <del> assert.strictEqual(Buffer.byteLength('' + n) + currentFileData.length, <add> assert.strictEqual(Buffer.byteLength(String(n)) + currentFileData.length, <ide> buffer.length); <ide> }); <ide> }); <ide><path>test/parallel/test-fs-buffertype-writesync.js <ide> common.refreshTmpDir(); <ide> v.forEach((value) => { <ide> const fd = fs.openSync(filePath, 'w'); <ide> fs.writeSync(fd, value); <del> assert.strictEqual(fs.readFileSync(filePath).toString(), value + ''); <add> assert.strictEqual(fs.readFileSync(filePath).toString(), String(value)); <ide> }); <ide><path>test/parallel/test-fs-error-messages.js <ide> try { <ide> } <ide> <ide> process.on('exit', function() { <del> assert.strictEqual(expected, errors.length, <del> 'Test fs sync exceptions raised, got ' + errors.length + <del> ' expected ' + expected); <add> assert.strictEqual( <add> expected, errors.length, <add> `Test fs sync exceptions raised, got ${errors.length} expected ${expected}` <add> ); <ide> }); <ide><path>test/parallel/test-fs-exists.js <ide> fs.exists(f, common.mustCall(function(y) { <ide> assert.strictEqual(y, true); <ide> })); <ide> <del>fs.exists(f + '-NO', common.mustCall(function(y) { <add>fs.exists(`${f}-NO`, common.mustCall(function(y) { <ide> assert.strictEqual(y, false); <ide> })); <ide> <ide> assert(fs.existsSync(f)); <del>assert(!fs.existsSync(f + '-NO')); <add>assert(!fs.existsSync(`${f}-NO`)); <ide><path>test/parallel/test-fs-mkdir.js <ide> function unlink(pathname) { <ide> common.refreshTmpDir(); <ide> <ide> { <del> const pathname = common.tmpDir + '/test1'; <add> const pathname = `${common.tmpDir}/test1`; <ide> <ide> unlink(pathname); <ide> <ide> common.refreshTmpDir(); <ide> } <ide> <ide> { <del> const pathname = common.tmpDir + '/test2'; <add> const pathname = `${common.tmpDir}/test2`; <ide> <ide> unlink(pathname); <ide> <ide> common.refreshTmpDir(); <ide> } <ide> <ide> { <del> const pathname = common.tmpDir + '/test3'; <add> const pathname = `${common.tmpDir}/test3`; <ide> <ide> unlink(pathname); <ide> fs.mkdirSync(pathname); <ide><path>test/parallel/test-fs-non-number-arguments-throw.js <ide> assert.throws(function() { <ide> "start as string didn't throw an error for createWriteStream"); <ide> <ide> saneEmitter.on('data', common.mustCall(function(data) { <del> assert.strictEqual(sanity, data.toString('utf8'), 'read ' + <del> data.toString('utf8') + ' instead of ' + sanity); <add> assert.strictEqual( <add> sanity, data.toString('utf8'), <add> `read ${data.toString('utf8')} instead of ${sanity}`); <ide> })); <ide><path>test/parallel/test-fs-readdir.js <ide> common.refreshTmpDir(); <ide> <ide> // Create the necessary files <ide> files.forEach(function(currentFile) { <del> fs.closeSync(fs.openSync(readdirDir + '/' + currentFile, 'w')); <add> fs.closeSync(fs.openSync(`${readdirDir}/${currentFile}`, 'w')); <ide> }); <ide> <ide> // Check the readdir Sync version 
<ide><path>test/parallel/test-fs-readfile-error.js <ide> if (common.isFreeBSD) { <ide> <ide> function test(env, cb) { <ide> const filename = path.join(common.fixturesDir, 'test-fs-readfile-error.js'); <del> const execPath = '"' + process.execPath + '" "' + filename + '"'; <add> const execPath = `"${process.execPath}" "${filename}"`; <ide> const options = { env: Object.assign(process.env, env) }; <ide> exec(execPath, options, common.mustCall((err, stdout, stderr) => { <ide> assert(err); <ide> assert.strictEqual(stdout, ''); <ide> assert.notStrictEqual(stderr, ''); <del> cb('' + stderr); <add> cb(String(stderr)); <ide> })); <ide> } <ide> <ide><path>test/parallel/test-fs-realpath.js <ide> let async_completed = 0; <ide> let async_expected = 0; <ide> const unlink = []; <ide> let skipSymlinks = false; <add>const tmpDir = common.tmpDir; <ide> <ide> common.refreshTmpDir(); <ide> <ide> if (common.isWindows) { <ide> <ide> <ide> function tmp(p) { <del> return path.join(common.tmpDir, p); <add> return path.join(tmpDir, p); <ide> } <ide> <del>const targetsAbsDir = path.join(common.tmpDir, 'targets'); <del>const tmpAbsDir = common.tmpDir; <add>const targetsAbsDir = path.join(tmpDir, 'targets'); <add>const tmpAbsDir = tmpDir; <ide> <ide> // Set up targetsAbsDir and expected subdirectories <ide> fs.mkdirSync(targetsAbsDir); <ide> function test_simple_relative_symlink(callback) { <ide> common.skip('symlink test (no privs)'); <ide> return runNextTest(); <ide> } <del> const entry = common.tmpDir + '/symlink'; <del> const expected = common.tmpDir + '/cycles/root.js'; <add> const entry = `${tmpDir}/symlink`; <add> const expected = `${tmpDir}/cycles/root.js`; <ide> [ <del> [entry, '../' + common.tmpDirName + '/cycles/root.js'] <add> [entry, `../${common.tmpDirName}/cycles/root.js`] <ide> ].forEach(function(t) { <ide> try { fs.unlinkSync(t[0]); } catch (e) {} <ide> console.log('fs.symlinkSync(%j, %j, %j)', t[1], t[0], 'file'); <ide> function test_simple_absolute_symlink(callback) { <ide> <ide> console.log('using type=%s', type); <ide> <del> const entry = tmpAbsDir + '/symlink'; <del> const expected = common.fixturesDir + '/nested-index/one'; <add> const entry = `${tmpAbsDir}/symlink`; <add> const expected = `${common.fixturesDir}/nested-index/one`; <ide> [ <ide> [entry, expected] <ide> ].forEach(function(t) { <ide> function test_cyclic_link_protection(callback) { <ide> common.skip('symlink test (no privs)'); <ide> return runNextTest(); <ide> } <del> const entry = path.join(common.tmpDir, '/cycles/realpath-3a'); <add> const entry = path.join(tmpDir, '/cycles/realpath-3a'); <ide> [ <ide> [entry, '../cycles/realpath-3b'], <del> [path.join(common.tmpDir, '/cycles/realpath-3b'), '../cycles/realpath-3c'], <del> [path.join(common.tmpDir, '/cycles/realpath-3c'), '../cycles/realpath-3a'] <add> [path.join(tmpDir, '/cycles/realpath-3b'), '../cycles/realpath-3c'], <add> [path.join(tmpDir, '/cycles/realpath-3c'), '../cycles/realpath-3a'] <ide> ].forEach(function(t) { <ide> try { fs.unlinkSync(t[0]); } catch (e) {} <ide> fs.symlinkSync(t[1], t[0], 'dir'); <ide> function test_cyclic_link_overprotection(callback) { <ide> common.skip('symlink test (no privs)'); <ide> return runNextTest(); <ide> } <del> const cycles = common.tmpDir + '/cycles'; <add> const cycles = `${tmpDir}/cycles`; <ide> const expected = fs.realpathSync(cycles); <del> const folder = cycles + '/folder'; <del> const link = folder + '/cycles'; <add> const folder = `${cycles}/folder`; <add> const link = `${folder}/cycles`; <ide> let testPath = cycles; <ide> 
testPath += '/folder/cycles'.repeat(10); <ide> try { fs.unlinkSync(link); } catch (ex) {} <ide> function test_relative_input_cwd(callback) { <ide> // we need to calculate the relative path to the tmp dir from cwd <ide> const entrydir = process.cwd(); <ide> const entry = path.relative(entrydir, <del> path.join(common.tmpDir + '/cycles/realpath-3a')); <del> const expected = common.tmpDir + '/cycles/root.js'; <add> path.join(`${tmpDir}/cycles/realpath-3a`)); <add> const expected = `${tmpDir}/cycles/root.js`; <ide> [ <ide> [entry, '../cycles/realpath-3b'], <del> [common.tmpDir + '/cycles/realpath-3b', '../cycles/realpath-3c'], <del> [common.tmpDir + '/cycles/realpath-3c', 'root.js'] <add> [`${tmpDir}/cycles/realpath-3b`, '../cycles/realpath-3c'], <add> [`${tmpDir}/cycles/realpath-3c`, 'root.js'] <ide> ].forEach(function(t) { <ide> const fn = t[0]; <ide> console.error('fn=%j', fn); <ide> function test_deep_symlink_mix(callback) { <ide> fs.mkdirSync(tmp('node-test-realpath-d2'), 0o700); <ide> try { <ide> [ <del> [entry, common.tmpDir + '/node-test-realpath-d1/foo'], <add> [entry, `${tmpDir}/node-test-realpath-d1/foo`], <ide> [tmp('node-test-realpath-d1'), <del> common.tmpDir + '/node-test-realpath-d2'], <add> `${tmpDir}/node-test-realpath-d2`], <ide> [tmp('node-test-realpath-d2/foo'), '../node-test-realpath-f2'], <del> [tmp('node-test-realpath-f2'), targetsAbsDir + <del> '/nested-index/one/realpath-c'], <del> [targetsAbsDir + '/nested-index/one/realpath-c', targetsAbsDir + <del> '/nested-index/two/realpath-c'], <del> [targetsAbsDir + '/nested-index/two/realpath-c', <del> common.tmpDir + '/cycles/root.js'] <add> [tmp('node-test-realpath-f2'), <add> `${targetsAbsDir}/nested-index/one/realpath-c`], <add> [`${targetsAbsDir}/nested-index/one/realpath-c`, <add> `${targetsAbsDir}/nested-index/two/realpath-c`], <add> [`${targetsAbsDir}/nested-index/two/realpath-c`, <add> `${tmpDir}/cycles/root.js`] <ide> ].forEach(function(t) { <ide> try { fs.unlinkSync(t[0]); } catch (e) {} <ide> fs.symlinkSync(t[1], t[0]); <ide> function test_deep_symlink_mix(callback) { <ide> } finally { <ide> unlink.push(tmp('node-test-realpath-d2')); <ide> } <del> const expected = tmpAbsDir + '/cycles/root.js'; <add> const expected = `${tmpAbsDir}/cycles/root.js`; <ide> assertEqualPath(fs.realpathSync(entry), path.resolve(expected)); <ide> asynctest(fs.realpath, [entry], callback, function(err, result) { <ide> assertEqualPath(result, path.resolve(expected)); <ide> function test_deep_symlink_mix(callback) { <ide> function test_non_symlinks(callback) { <ide> console.log('test_non_symlinks'); <ide> const entrydir = path.dirname(tmpAbsDir); <del> const entry = tmpAbsDir.substr(entrydir.length + 1) + '/cycles/root.js'; <del> const expected = tmpAbsDir + '/cycles/root.js'; <add> const entry = `${tmpAbsDir.substr(entrydir.length + 1)}/cycles/root.js`; <add> const expected = `${tmpAbsDir}/cycles/root.js`; <ide> const origcwd = process.cwd(); <ide> process.chdir(entrydir); <ide> assertEqualPath(fs.realpathSync(entry), path.resolve(expected)); <ide> const upone = path.join(process.cwd(), '..'); <ide> function test_escape_cwd(cb) { <ide> console.log('test_escape_cwd'); <ide> asynctest(fs.realpath, ['..'], cb, function(er, uponeActual) { <del> assertEqualPath(upone, uponeActual, <del> 'realpath("..") expected: ' + path.resolve(upone) + <del> ' actual:' + uponeActual); <add> assertEqualPath( <add> upone, uponeActual, <add> `realpath("..") expected: ${path.resolve(upone)} actual:${uponeActual}`); <ide> }); <ide> } <ide> const uponeActual = 
fs.realpathSync('..'); <del>assertEqualPath(upone, uponeActual, <del> 'realpathSync("..") expected: ' + path.resolve(upone) + <del> ' actual:' + uponeActual); <add>assertEqualPath( <add> upone, uponeActual, <add> `realpathSync("..") expected: ${path.resolve(upone)} actual:${uponeActual}`); <ide> <ide> <ide> // going up with .. multiple times <ide> function test_abs_with_kids(cb) { <ide> <ide> console.log('using type=%s', type); <ide> <del> const root = tmpAbsDir + '/node-test-realpath-abs-kids'; <add> const root = `${tmpAbsDir}/node-test-realpath-abs-kids`; <ide> function cleanup() { <ide> ['/a/b/c/x.txt', <ide> '/a/link' <ide> function test_abs_with_kids(cb) { <ide> '/a/b', <ide> '/a/b/c' <ide> ].forEach(function(folder) { <del> console.log('mkdir ' + root + folder); <add> console.log(`mkdir ${root}${folder}`); <ide> fs.mkdirSync(root + folder, 0o700); <ide> }); <del> fs.writeFileSync(root + '/a/b/c/x.txt', 'foo'); <del> fs.symlinkSync(root + '/a/b', root + '/a/link', type); <add> fs.writeFileSync(`${root}/a/b/c/x.txt`, 'foo'); <add> fs.symlinkSync(`${root}/a/b`, `${root}/a/link`, type); <ide> } <ide> setup(); <del> const linkPath = root + '/a/link/c/x.txt'; <del> const expectPath = root + '/a/b/c/x.txt'; <add> const linkPath = `${root}/a/link/c/x.txt`; <add> const expectPath = `${root}/a/b/c/x.txt`; <ide> const actual = fs.realpathSync(linkPath); <ide> // console.log({link:linkPath,expect:expectPath,actual:actual},'sync'); <ide> assertEqualPath(actual, path.resolve(expectPath)); <ide> function runNextTest(err) { <ide> assert.ifError(err); <ide> const test = tests.shift(); <ide> if (!test) { <del> return console.log(numtests + <del> ' subtests completed OK for fs.realpath'); <add> return console.log(`${numtests} subtests completed OK for fs.realpath`); <ide> } <ide> testsRun++; <ide> test(runNextTest); <ide><path>test/parallel/test-fs-sir-writes-alot.js <ide> function testBuffer(b) { <ide> for (let i = 0; i < b.length; i++) { <ide> bytesChecked++; <ide> if (b[i] !== 'a'.charCodeAt(0) && b[i] !== '\n'.charCodeAt(0)) { <del> throw new Error('invalid char ' + i + ',' + b[i]); <add> throw new Error(`invalid char ${i},${b[i]}`); <ide> } <ide> } <ide> } <ide><path>test/parallel/test-fs-stat.js <ide> fs.stat(__filename, common.mustCall(function(err, s) { <ide> <ide> console.dir(s); <ide> <del> console.log('isDirectory: ' + JSON.stringify(s.isDirectory())); <add> console.log(`isDirectory: ${JSON.stringify(s.isDirectory())}`); <ide> assert.strictEqual(false, s.isDirectory()); <ide> <del> console.log('isFile: ' + JSON.stringify(s.isFile())); <add> console.log(`isFile: ${JSON.stringify(s.isFile())}`); <ide> assert.strictEqual(true, s.isFile()); <ide> <del> console.log('isSocket: ' + JSON.stringify(s.isSocket())); <add> console.log(`isSocket: ${JSON.stringify(s.isSocket())}`); <ide> assert.strictEqual(false, s.isSocket()); <ide> <del> console.log('isBlockDevice: ' + JSON.stringify(s.isBlockDevice())); <add> console.log(`isBlockDevice: ${JSON.stringify(s.isBlockDevice())}`); <ide> assert.strictEqual(false, s.isBlockDevice()); <ide> <del> console.log('isCharacterDevice: ' + JSON.stringify(s.isCharacterDevice())); <add> console.log(`isCharacterDevice: ${JSON.stringify(s.isCharacterDevice())}`); <ide> assert.strictEqual(false, s.isCharacterDevice()); <ide> <del> console.log('isFIFO: ' + JSON.stringify(s.isFIFO())); <add> console.log(`isFIFO: ${JSON.stringify(s.isFIFO())}`); <ide> assert.strictEqual(false, s.isFIFO()); <ide> <del> console.log('isSymbolicLink: ' + JSON.stringify(s.isSymbolicLink())); <add> 
console.log(`isSymbolicLink: ${JSON.stringify(s.isSymbolicLink())}`); <ide> assert.strictEqual(false, s.isSymbolicLink()); <ide> <ide> assert.ok(s.mtime instanceof Date); <ide><path>test/parallel/test-fs-stream-double-close.js <ide> test1(fs.createReadStream(__filename)); <ide> test2(fs.createReadStream(__filename)); <ide> test3(fs.createReadStream(__filename)); <ide> <del>test1(fs.createWriteStream(common.tmpDir + '/dummy1')); <del>test2(fs.createWriteStream(common.tmpDir + '/dummy2')); <del>test3(fs.createWriteStream(common.tmpDir + '/dummy3')); <add>test1(fs.createWriteStream(`${common.tmpDir}/dummy1`)); <add>test2(fs.createWriteStream(`${common.tmpDir}/dummy2`)); <add>test3(fs.createWriteStream(`${common.tmpDir}/dummy3`)); <ide> <ide> function test1(stream) { <ide> stream.destroy(); <ide><path>test/parallel/test-fs-symlink-dir-junction-relative.js <ide> function verifyLink(linkPath) { <ide> const stats = fs.lstatSync(linkPath); <ide> assert.ok(stats.isSymbolicLink()); <ide> <del> const data1 = fs.readFileSync(linkPath + '/x.txt', 'ascii'); <del> const data2 = fs.readFileSync(linkTarget + '/x.txt', 'ascii'); <add> const data1 = fs.readFileSync(`${linkPath}/x.txt`, 'ascii'); <add> const data2 = fs.readFileSync(`${linkTarget}/x.txt`, 'ascii'); <ide> assert.strictEqual(data1, data2); <ide> <ide> // Clean up. <ide><path>test/parallel/test-fs-symlink-dir-junction.js <ide> const linkPath = path.join(common.tmpDir, 'cycles_link'); <ide> <ide> common.refreshTmpDir(); <ide> <del>console.log('linkData: ' + linkData); <del>console.log('linkPath: ' + linkPath); <add>console.log(`linkData: ${linkData}`); <add>console.log(`linkPath: ${linkPath}`); <ide> <ide> fs.symlink(linkData, linkPath, 'junction', common.mustCall(function(err) { <ide> assert.ifError(err); <ide><path>test/parallel/test-fs-timestamp-parsing-error.js <ide> const assert = require('assert'); <ide> <ide> [Infinity, -Infinity, NaN].forEach((input) => { <ide> assert.throws(() => fs._toUnixTimestamp(input), <del> new RegExp('^Error: Cannot parse time: ' + input + '$')); <add> new RegExp(`^Error: Cannot parse time: ${input}$`)); <ide> }); <ide> <ide> assert.throws(() => fs._toUnixTimestamp({}), <ide><path>test/parallel/test-fs-truncate-GH-6233.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const fs = require('fs'); <ide> <del>const filename = common.tmpDir + '/truncate-file.txt'; <add>const filename = `${common.tmpDir}/truncate-file.txt`; <ide> <ide> common.refreshTmpDir(); <ide> <ide><path>test/parallel/test-fs-write-file.js <ide> fs.writeFile(filename3, n, { mode: m }, common.mustCall(function(e) { <ide> fs.readFile(filename3, common.mustCall(function(e, buffer) { <ide> assert.ifError(e); <ide> <del> assert.strictEqual(Buffer.byteLength('' + n), buffer.length); <add> assert.strictEqual(Buffer.byteLength(String(n)), buffer.length); <ide> })); <ide> })); <ide> <ide><path>test/parallel/test-fs-write-stream-err.js <ide> const fs = require('fs'); <ide> <ide> common.refreshTmpDir(); <ide> <del>const stream = fs.createWriteStream(common.tmpDir + '/out', { <add>const stream = fs.createWriteStream(`${common.tmpDir}/out`, { <ide> highWaterMark: 10 <ide> }); <ide> const err = new Error('BAM'); <ide><path>test/parallel/test-fs-write-string-coerce.js <ide> common.refreshTmpDir(); <ide> <ide> const fn = path.join(common.tmpDir, 'write-string-coerce.txt'); <ide> const data = true; <del>const expected = data + ''; <add>const expected = String(data); <ide> <ide> fs.open(fn, 'w', 0o644, 
common.mustCall(function(err, fd) { <ide> assert.ifError(err); <ide><path>test/parallel/test-http-abort-client.js <ide> server.listen(0, common.mustCall(function() { <ide> }, common.mustCall(function(res) { <ide> server.close(); <ide> <del> console.log('Got res: ' + res.statusCode); <add> console.log(`Got res: ${res.statusCode}`); <ide> console.dir(res.headers); <ide> <ide> res.on('data', function(chunk) { <del> console.log('Read ' + chunk.length + ' bytes'); <add> console.log(`Read ${chunk.length} bytes`); <ide> console.log(' chunk=%j', chunk.toString()); <ide> }); <ide> <ide><path>test/parallel/test-http-abort-queued.js <ide> server.listen(0, function() { <ide> assert.strictEqual(Object.keys(agent.sockets).length, 1); <ide> assert.strictEqual(Object.keys(agent.requests).length, 1); <ide> <del> console.log('Got res: ' + res1.statusCode); <add> console.log(`Got res: ${res1.statusCode}`); <ide> console.dir(res1.headers); <ide> <ide> res1.on('data', function(chunk) { <del> console.log('Read ' + chunk.length + ' bytes'); <add> console.log(`Read ${chunk.length} bytes`); <ide> console.log(' chunk=%j', chunk.toString()); <ide> complete(); <ide> }); <ide><path>test/parallel/test-http-after-connect.js <ide> server.listen(0, function() { <ide> function doRequest(i) { <ide> http.get({ <ide> port: server.address().port, <del> path: '/request' + i <add> path: `/request${i}` <ide> }, common.mustCall(function(res) { <ide> console.error('Client got GET response'); <ide> let data = ''; <ide> function doRequest(i) { <ide> data += chunk; <ide> }); <ide> res.on('end', function() { <del> assert.strictEqual(data, '/request' + i); <add> assert.strictEqual(data, `/request${i}`); <ide> ++clientResponses; <ide> if (clientResponses === 2) { <ide> server.close(); <ide><path>test/parallel/test-http-agent-error-on-idle.js <ide> server.listen(0, function() { <ide> process.nextTick(function() { <ide> const freeSockets = agent.freeSockets[socketKey]; <ide> assert.strictEqual(freeSockets.length, 1, <del> 'expect a free socket on ' + socketKey); <add> `expect a free socket on ${socketKey}`); <ide> <ide> //generate a random error on the free socket <ide> const freeSocket = freeSockets[0]; <ide><path>test/parallel/test-http-agent-getname.js <ide> for (const family of [0, null, undefined, 'bogus']) <ide> assert.strictEqual(agent.getName({ family }), 'localhost::'); <ide> <ide> for (const family of [4, 6]) <del> assert.strictEqual(agent.getName({ family }), 'localhost:::' + family); <add> assert.strictEqual(agent.getName({ family }), `localhost:::${family}`); <ide><path>test/parallel/test-http-agent-maxsockets.js <ide> function done() { <ide> } <ide> const freepool = agent.freeSockets[Object.keys(agent.freeSockets)[0]]; <ide> assert.strictEqual(freepool.length, 2, <del> 'expect keep 2 free sockets, but got ' + freepool.length); <add> `expect keep 2 free sockets, but got ${freepool.length}`); <ide> agent.destroy(); <ide> server.close(); <ide> } <ide><path>test/parallel/test-http-buffer-sanity.js <ide> const web = http.Server(function(req, res) { <ide> }); <ide> <ide> req.connection.on('error', function(e) { <del> console.log('http server-side error: ' + e.message); <add> console.log(`http server-side error: ${e.message}`); <ide> process.exit(1); <ide> }); <ide> }); <ide><path>test/parallel/test-http-chunk-problem.js <ide> cp.exec(ddcmd, function(err, stdout, stderr) { <ide> // End the response on exit (and log errors) <ide> cat.on('exit', (code) => { <ide> if (code !== 0) { <del> console.error('subprocess exited with code ' + 
code); <add> console.error(`subprocess exited with code ${code}`); <ide> process.exit(1); <ide> } <ide> }); <ide><path>test/parallel/test-http-client-abort.js <ide> const server = http.Server(function(req, res) { <ide> // event like "aborted" or something. <ide> req.on('aborted', function() { <ide> clientAborts++; <del> console.log('Got abort ' + clientAborts); <add> console.log(`Got abort ${clientAborts}`); <ide> if (clientAborts === N) { <ide> console.log('All aborts detected, you win.'); <ide> server.close(); <ide> server.listen(0, function() { <ide> console.log('Server listening.'); <ide> <ide> for (let i = 0; i < N; i++) { <del> console.log('Making client ' + i); <del> const options = { port: this.address().port, path: '/?id=' + i }; <add> console.log(`Making client ${i}`); <add> const options = { port: this.address().port, path: `/?id=${i}` }; <ide> const req = http.get(options, function(res) { <del> console.log('Client response code ' + res.statusCode); <add> console.log(`Client response code ${res.statusCode}`); <ide> <ide> res.resume(); <ide> if (++responses === N) { <ide><path>test/parallel/test-http-client-agent.js <ide> server.listen(0, function() { <ide> function request(i) { <ide> const req = http.get({ <ide> port: server.address().port, <del> path: '/' + i <add> path: `/${i}` <ide> }, function(res) { <ide> const socket = req.socket; <ide> socket.on('close', function() { <ide><path>test/parallel/test-http-client-default-headers-exist.js <ide> const server = http.createServer(function(req, res) { <ide> res.end(); <ide> <ide> assert(expectedHeaders.hasOwnProperty(req.method), <del> req.method + ' was an unexpected method'); <add> `${req.method} was an unexpected method`); <ide> <ide> const requestHeaders = Object.keys(req.headers); <ide> requestHeaders.forEach(function(header) { <ide> assert.notStrictEqual( <ide> expectedHeaders[req.method].indexOf(header.toLowerCase()), <ide> -1, <del> header + ' shoud not exist for method ' + req.method <add> `${header} should not exist for method ${req.method}` <ide> ); <ide> }); <ide> <ide> assert.strictEqual( <ide> requestHeaders.length, <ide> expectedHeaders[req.method].length, <del> 'some headers were missing for method: ' + req.method <add> `some headers were missing for method: ${req.method}` <ide> ); <ide> <ide> if (expectedMethods.length === requestCount) <ide><path>test/parallel/test-http-client-timeout-agent.js <ide> server.listen(0, options.host, function() { <ide> let req; <ide> <ide> for (requests_sent = 0; requests_sent < 30; requests_sent += 1) { <del> options.path = '/' + requests_sent; <add> options.path = `/${requests_sent}`; <ide> req = http.request(options); <ide> req.id = requests_sent; <ide> req.on('response', function(res) { <ide> res.on('data', function(data) { <del> console.log('res#' + this.req.id + ' data:' + data); <add> console.log(`res#${this.req.id} data:${data}`); <ide> }); <ide> res.on('end', function(data) { <del> console.log('res#' + this.req.id + ' end'); <add> console.log(`res#${this.req.id} end`); <ide> requests_done += 1; <ide> }); <ide> }); <ide> req.on('close', function() { <del> console.log('req#' + this.id + ' close'); <add> console.log(`req#${this.id} close`); <ide> }); <ide> req.on('error', function() { <del> console.log('req#' + this.id + ' error'); <add> console.log(`req#${this.id} error`); <ide> this.destroy(); <ide> }); <ide> req.setTimeout(50, function() { <del> console.log('req#' + this.id + ' timeout'); <add> console.log(`req#${this.id} timeout`); <ide> this.abort(); <ide> requests_done += 
1; <ide> }); <ide><path>test/parallel/test-http-client-timeout-with-data.js <ide> server.listen(0, options.host, function() { <ide> })); <ide> <ide> res.on('data', common.mustCall(function(data) { <del> assert.strictEqual('' + data, '*'); <add> assert.strictEqual(String(data), '*'); <ide> nchunks++; <ide> }, 2)); <ide> <ide><path>test/parallel/test-http-client-unescaped-path.js <ide> const assert = require('assert'); <ide> const http = require('http'); <ide> <ide> for (let i = 0; i <= 32; i += 1) { <del> const path = 'bad' + String.fromCharCode(i) + 'path'; <add> const path = `bad${String.fromCharCode(i)}path`; <ide> assert.throws(() => http.get({ path }, common.mustNotCall()), <ide> /contains unescaped characters/); <ide> } <ide><path>test/parallel/test-http-client-upload.js <ide> const server = http.createServer(common.mustCall(function(req, res) { <ide> let sent_body = ''; <ide> <ide> req.on('data', function(chunk) { <del> console.log('server got: ' + JSON.stringify(chunk)); <add> console.log(`server got: ${JSON.stringify(chunk)}`); <ide> sent_body += chunk; <ide> }); <ide> <ide><path>test/parallel/test-http-connect-req-res.js <ide> server.listen(0, common.mustCall(function() { <ide> console.error('Client got CONNECT request'); <ide> <ide> // Make sure this request got removed from the pool. <del> const name = 'localhost:' + server.address().port; <add> const name = `localhost:${server.address().port}`; <ide> assert(!http.globalAgent.sockets.hasOwnProperty(name)); <ide> assert(!http.globalAgent.requests.hasOwnProperty(name)); <ide> <ide><path>test/parallel/test-http-connect.js <ide> server.listen(0, common.mustCall(function() { <ide> <ide> req.on('connect', common.mustCall((res, socket, firstBodyChunk) => { <ide> // Make sure this request got removed from the pool. 
<del> const name = 'localhost:' + server.address().port; <add> const name = `localhost:${server.address().port}`; <ide> assert(!http.globalAgent.sockets.hasOwnProperty(name)); <ide> assert(!http.globalAgent.requests.hasOwnProperty(name)); <ide> <ide><path>test/parallel/test-http-contentLength0.js <ide> const s = http.createServer(function(req, res) { <ide> s.listen(0, function() { <ide> <ide> const request = http.request({ port: this.address().port }, (response) => { <del> console.log('STATUS: ' + response.statusCode); <add> console.log(`STATUS: ${response.statusCode}`); <ide> s.close(); <ide> response.resume(); <ide> }); <ide><path>test/parallel/test-http-default-port.js <ide> const fs = require('fs'); <ide> const path = require('path'); <ide> const fixtures = path.join(common.fixturesDir, 'keys'); <ide> const options = { <del> key: fs.readFileSync(fixtures + '/agent1-key.pem'), <del> cert: fs.readFileSync(fixtures + '/agent1-cert.pem') <add> key: fs.readFileSync(`${fixtures}/agent1-key.pem`), <add> cert: fs.readFileSync(`${fixtures}/agent1-cert.pem`) <ide> }; <ide> let gotHttpsResp = false; <ide> let gotHttpResp = false; <ide><path>test/parallel/test-http-exceptions.js <ide> const server = http.createServer(function(req, res) { <ide> <ide> server.listen(0, function() { <ide> for (let i = 0; i < 4; i += 1) { <del> http.get({ port: this.address().port, path: '/busy/' + i }); <add> http.get({ port: this.address().port, path: `/busy/${i}` }); <ide> } <ide> }); <ide> <ide> let exception_count = 0; <ide> <ide> process.on('uncaughtException', function(err) { <del> console.log('Caught an exception: ' + err); <add> console.log(`Caught an exception: ${err}`); <ide> if (err.name === 'AssertionError') throw err; <ide> if (++exception_count === 4) process.exit(0); <ide> }); <ide><path>test/parallel/test-http-expect-continue.js <ide> server.on('listening', function() { <ide> req.on('response', function(res) { <ide> assert.strictEqual(got_continue, true, <ide> 'Full response received before 100 Continue'); <del> assert.strictEqual(200, res.statusCode, 'Final status code was ' + <del> res.statusCode + ', not 200.'); <add> assert.strictEqual(200, res.statusCode, <add> `Final status code was ${res.statusCode}, not 200.`); <ide> res.setEncoding('utf8'); <ide> res.on('data', function(chunk) { body += chunk; }); <ide> res.on('end', function() { <ide><path>test/parallel/test-http-expect-handling.js <ide> function nextTest() { <ide> } <ide> <ide> http.get(options, function(response) { <del> console.log('client: expected status: ' + test); <del> console.log('client: statusCode: ' + response.statusCode); <add> console.log(`client: expected status: ${test}`); <add> console.log(`client: statusCode: ${response.statusCode}`); <ide> assert.strictEqual(response.statusCode, test); <ide> assert.strictEqual(response.statusMessage, 'Expectation Failed'); <ide> <ide><path>test/parallel/test-http-extra-response.js <ide> const server = net.createServer(function(socket) { <ide> server.listen(0, common.mustCall(function() { <ide> http.get({ port: this.address().port }, common.mustCall(function(res) { <ide> let buffer = ''; <del> console.log('Got res code: ' + res.statusCode); <add> console.log(`Got res code: ${res.statusCode}`); <ide> <ide> res.setEncoding('utf8'); <ide> res.on('data', function(chunk) { <ide> buffer += chunk; <ide> }); <ide> <ide> res.on('end', common.mustCall(function() { <del> console.log('Response ended, read ' + buffer.length + ' bytes'); <add> console.log(`Response ended, read ${buffer.length} bytes`); 
<ide> assert.strictEqual(body, buffer); <ide> server.close(); <ide> })); <ide><path>test/parallel/test-http-full-response.js <ide> function runAb(opts, callback) { <ide> exec(command, function(err, stdout, stderr) { <ide> if (err) { <ide> if (/ab|apr/mi.test(stderr)) { <del> common.skip('problem spawning `ab`.\n' + stderr); <add> common.skip(`problem spawning \`ab\`.\n${stderr}`); <ide> process.reallyExit(0); <ide> } <ide> process.exit(); <ide><path>test/parallel/test-http-get-pipeline-problem.js <ide> http.globalAgent.maxSockets = 1; <ide> <ide> common.refreshTmpDir(); <ide> <del>const image = fs.readFileSync(common.fixturesDir + '/person.jpg'); <add>const image = fs.readFileSync(`${common.fixturesDir}/person.jpg`); <ide> <del>console.log('image.length = ' + image.length); <add>console.log(`image.length = ${image.length}`); <ide> <ide> const total = 10; <ide> let requests = 0; <ide> server.listen(0, function() { <ide> }; <ide> <ide> http.get(opts, function(res) { <del> console.error('recv ' + x); <del> const s = fs.createWriteStream(common.tmpDir + '/' + x + '.jpg'); <add> console.error(`recv ${x}`); <add> const s = fs.createWriteStream(`${common.tmpDir}/${x}.jpg`); <ide> res.pipe(s); <ide> <ide> s.on('finish', function() { <del> console.error('done ' + x); <add> console.error(`done ${x}`); <ide> if (++responses === total) { <ide> checkFiles(); <ide> } <ide> function checkFiles() { <ide> assert(total <= files.length); <ide> <ide> for (let i = 0; i < total; i++) { <del> const fn = i + '.jpg'; <del> assert.ok(files.includes(fn), "couldn't find '" + fn + "'"); <del> const stat = fs.statSync(common.tmpDir + '/' + fn); <del> assert.strictEqual(image.length, stat.size, <del> "size doesn't match on '" + fn + <del> "'. Got " + stat.size + ' bytes'); <add> const fn = `${i}.jpg`; <add> assert.ok(files.includes(fn), `couldn't find '${fn}'`); <add> const stat = fs.statSync(`${common.tmpDir}/${fn}`); <add> assert.strictEqual( <add> image.length, stat.size, <add> `size doesn't match on '${fn}'. 
Got ${stat.size} bytes`); <ide> } <ide> <ide> checkedFiles = true; <ide><path>test/parallel/test-http-host-headers.js <ide> function reqHandler(req, res) { <ide> if (req.url === '/setHostFalse5') { <ide> assert.strictEqual(req.headers.host, undefined); <ide> } else { <del> assert.strictEqual(req.headers.host, `localhost:${this.address().port}`, <del> 'Wrong host header for req[' + req.url + ']: ' + <del> req.headers.host); <add> assert.strictEqual( <add> req.headers.host, `localhost:${this.address().port}`, <add> `Wrong host header for req[${req.url}]: ${req.headers.host}`); <ide> } <ide> res.writeHead(200, {}); <ide> res.end('ok'); <ide> function testHttp() { <ide> assert.ifError(er); <ide> http.get({ <ide> method: 'GET', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> port: httpServer.address().port, <ide> rejectUnauthorized: false <ide> }, cb).on('error', common.mustNotCall()); <ide> <ide> http.request({ <ide> method: 'GET', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> port: httpServer.address().port, <ide> rejectUnauthorized: false <ide> }, cb).on('error', common.mustNotCall()).end(); <ide> <ide> http.request({ <ide> method: 'POST', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> port: httpServer.address().port, <ide> rejectUnauthorized: false <ide> }, cb).on('error', common.mustNotCall()).end(); <ide> <ide> http.request({ <ide> method: 'PUT', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> port: httpServer.address().port, <ide> rejectUnauthorized: false <ide> }, cb).on('error', common.mustNotCall()).end(); <ide> <ide> http.request({ <ide> method: 'DELETE', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> port: httpServer.address().port, <ide> rejectUnauthorized: false <ide><path>test/parallel/test-http-invalidheaderfield2.js <ide> const checkInvalidHeaderChar = require('_http_common')._checkInvalidHeaderChar; <ide> '4+2', <ide> '3.14159265359' <ide> ].forEach(function(str) { <del> assert.strictEqual(checkIsHttpToken(str), <del> true, <del> 'checkIsHttpToken(' + <del> inspect(str) + <del> ') unexpectedly failed'); <add> assert.strictEqual( <add> checkIsHttpToken(str), true, <add> `checkIsHttpToken(${inspect(str)}) unexpectedly failed`); <ide> }); <ide> // Bad header field names <ide> [ <ide> const checkInvalidHeaderChar = require('_http_common')._checkInvalidHeaderChar; <ide> '"Quote"', <ide> 'This,That' <ide> ].forEach(function(str) { <del> assert.strictEqual(checkIsHttpToken(str), <del> false, <del> 'checkIsHttpToken(' + <del> inspect(str) + <del> ') unexpectedly succeeded'); <add> assert.strictEqual( <add> checkIsHttpToken(str), false, <add> `checkIsHttpToken(${inspect(str)}) unexpectedly succeeded`); <ide> }); <ide> <ide> <ide> const checkInvalidHeaderChar = require('_http_common')._checkInvalidHeaderChar; <ide> '0123456789ABCdef', <ide> '!@#$%^&*()-_=+\\;\':"[]{}<>,./?|~`' <ide> ].forEach(function(str) { <del> assert.strictEqual(checkInvalidHeaderChar(str), <del> false, <del> 'checkInvalidHeaderChar(' + <del> inspect(str) + <del> ') unexpectedly failed'); <add> assert.strictEqual( <add> checkInvalidHeaderChar(str), false, <add> `checkInvalidHeaderChar(${inspect(str)}) unexpectedly failed`); <ide> }); <ide> <ide> // Bad header field values <ide> const checkInvalidHeaderChar = require('_http_common')._checkInvalidHeaderChar; <ide> 'foo\vbar', <ide> 'Ding!\x07' <ide> 
].forEach(function(str) { <del> assert.strictEqual(checkInvalidHeaderChar(str), <del> true, <del> 'checkInvalidHeaderChar(' + <del> inspect(str) + <del> ') unexpectedly succeeded'); <add> assert.strictEqual( <add> checkInvalidHeaderChar(str), true, <add> `checkInvalidHeaderChar(${inspect(str)}) unexpectedly succeeded`); <ide> }); <ide><path>test/parallel/test-http-keepalive-client.js <ide> function makeRequest(n) { <ide> const req = http.request({ <ide> port: server.address().port, <ide> agent: agent, <del> path: '/' + n <add> path: `/${n}` <ide> }); <ide> <ide> req.end(); <ide> function makeRequest(n) { <ide> data += c; <ide> }); <ide> res.on('end', function() { <del> assert.strictEqual(data, '/' + n); <add> assert.strictEqual(data, `/${n}`); <ide> setTimeout(function() { <ide> actualRequests++; <ide> makeRequest(n - 1); <ide><path>test/parallel/test-http-keepalive-maxsockets.js <ide> server.listen(0, function() { <ide> function makeReq(i, cb) { <ide> http.request({ <ide> port: server.address().port, <del> path: '/' + i, <add> path: `/${i}`, <ide> agent: agent <ide> }, function(res) { <ide> let data = ''; <ide> server.listen(0, function() { <ide> data += c; <ide> }); <ide> res.on('end', function() { <del> assert.strictEqual(data, '/' + i); <add> assert.strictEqual(data, `/${i}`); <ide> cb(); <ide> }); <ide> }).end(); <ide><path>test/parallel/test-http-keepalive-request.js <ide> function makeRequest(n) { <ide> <ide> const req = http.request({ <ide> port: server.address().port, <del> path: '/' + n, <add> path: `/${n}`, <ide> agent: agent <ide> }); <ide> <ide> function makeRequest(n) { <ide> data += c; <ide> }); <ide> res.on('end', function() { <del> assert.strictEqual(data, '/' + n); <add> assert.strictEqual(data, `/${n}`); <ide> setTimeout(function() { <ide> actualRequests++; <ide> makeRequest(n - 1); <ide><path>test/parallel/test-http-localaddress-bind-error.js <ide> const http = require('http'); <ide> const invalidLocalAddress = '1.2.3.4'; <ide> <ide> const server = http.createServer(function(req, res) { <del> console.log('Connect from: ' + req.connection.remoteAddress); <add> console.log(`Connect from: ${req.connection.remoteAddress}`); <ide> <ide> req.on('end', function() { <ide> res.writeHead(200, { 'Content-Type': 'text/plain' }); <del> res.end('You are from: ' + req.connection.remoteAddress); <add> res.end(`You are from: ${req.connection.remoteAddress}`); <ide> }); <ide> req.resume(); <ide> }); <ide> server.listen(0, '127.0.0.1', common.mustCall(function() { <ide> }, function(res) { <ide> assert.fail('unexpectedly got response from server'); <ide> }).on('error', common.mustCall(function(e) { <del> console.log('client got error: ' + e.message); <add> console.log(`client got error: ${e.message}`); <ide> server.close(); <ide> })).end(); <ide> })); <ide><path>test/parallel/test-http-localaddress.js <ide> if (!common.hasMultiLocalhost()) { <ide> } <ide> <ide> const server = http.createServer(function(req, res) { <del> console.log('Connect from: ' + req.connection.remoteAddress); <add> console.log(`Connect from: ${req.connection.remoteAddress}`); <ide> assert.strictEqual('127.0.0.2', req.connection.remoteAddress); <ide> <ide> req.on('end', function() { <ide> res.writeHead(200, { 'Content-Type': 'text/plain' }); <del> res.end('You are from: ' + req.connection.remoteAddress); <add> res.end(`You are from: ${req.connection.remoteAddress}`); <ide> }); <ide> req.resume(); <ide> }); <ide><path>test/parallel/test-http-malformed-request.js <ide> let nrequests_completed = 0; <ide> const 
nrequests_expected = 1; <ide> <ide> const server = http.createServer(function(req, res) { <del> console.log('req: ' + JSON.stringify(url.parse(req.url))); <add> console.log(`req: ${JSON.stringify(url.parse(req.url))}`); <ide> <ide> res.writeHead(200, {'Content-Type': 'text/plain'}); <ide> res.write('Hello World'); <ide><path>test/parallel/test-http-max-headers-count.js <ide> let responses = 0; <ide> const headers = {}; <ide> const N = 2000; <ide> for (let i = 0; i < N; ++i) { <del> headers['key' + i] = i; <add> headers[`key${i}`] = i; <ide> } <ide> <ide> const maxAndExpected = [ // for server <ide><path>test/parallel/test-http-outgoing-finish.js <ide> function write(out) { <ide> finishEvent = true; <ide> console.error('%s finish event', name); <ide> process.nextTick(function() { <del> assert(endCb, name + ' got finish event before endcb!'); <add> assert(endCb, `${name} got finish event before endcb!`); <ide> console.log('ok - %s finishEvent', name); <ide> }); <ide> }); <ide> function write(out) { <ide> endCb = true; <ide> console.error('%s endCb', name); <ide> process.nextTick(function() { <del> assert(finishEvent, name + ' got endCb event before finishEvent!'); <add> assert(finishEvent, `${name} got endCb event before finishEvent!`); <ide> console.log('ok - %s endCb', name); <ide> }); <ide> }); <ide><path>test/parallel/test-http-parser.js <ide> function mustCall(f, times) { <ide> <ide> function expectBody(expected) { <ide> return mustCall(function(buf, start, len) { <del> const body = '' + buf.slice(start, start + len); <add> const body = String(buf.slice(start, start + len)); <ide> assert.strictEqual(body, expected); <ide> }); <ide> } <ide> function expectBody(expected) { <ide> // Simple request test. <ide> // <ide> { <del> const request = Buffer.from( <del> 'GET /hello HTTP/1.1' + CRLF + <del> CRLF); <add> const request = Buffer.from(`GET /hello HTTP/1.1${CRLF}${CRLF}`); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <ide> method, url, statusCode, statusMessage, <ide> function expectBody(expected) { <ide> }; <ide> <ide> const onBody = (buf, start, len) => { <del> const body = '' + buf.slice(start, start + len); <add> const body = String(buf.slice(start, start + len)); <ide> assert.strictEqual(body, 'pong'); <ide> }; <ide> <ide> function expectBody(expected) { <ide> // <ide> { <ide> const request = Buffer.from( <del> 'HTTP/1.0 200 Connection established' + CRLF + <del> CRLF); <add> `HTTP/1.0 200 Connection established${CRLF}${CRLF}`); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <ide> method, url, statusCode, statusMessage, <ide> function expectBody(expected) { <ide> }; <ide> <ide> const onBody = (buf, start, len) => { <del> const body = '' + buf.slice(start, start + len); <add> const body = String(buf.slice(start, start + len)); <ide> assert.strictEqual(body, 'ping'); <ide> seen_body = true; <ide> }; <ide> function expectBody(expected) { <ide> // <ide> { <ide> // 256 X-Filler headers <del> let lots_of_headers = 'X-Filler: 42' + CRLF; <del> lots_of_headers = lots_of_headers.repeat(256); <add> const lots_of_headers = `X-Filler: 42${CRLF}`.repeat(256); <ide> <ide> const request = Buffer.from( <ide> 'GET /foo/bar/baz?quux=42#1337 HTTP/1.0' + CRLF + <ide> function expectBody(expected) { <ide> }; <ide> <ide> const onBody = (buf, start, len) => { <del> const body = '' + buf.slice(start, start + len); <add> const body = String(buf.slice(start, start + len)); <ide> assert.strictEqual(body, 'foo=42&bar=1337'); <ide> }; <ide> <ide> 
function expectBody(expected) { <ide> const body_parts = ['123', '123456', '1234567890']; <ide> <ide> const onBody = (buf, start, len) => { <del> const body = '' + buf.slice(start, start + len); <add> const body = String(buf.slice(start, start + len)); <ide> assert.strictEqual(body, body_parts[body_part++]); <ide> }; <ide> <ide> function expectBody(expected) { <ide> ['123', '123456', '123456789', '123456789ABC', '123456789ABCDEF']; <ide> <ide> const onBody = (buf, start, len) => { <del> const body = '' + buf.slice(start, start + len); <add> const body = String(buf.slice(start, start + len)); <ide> assert.strictEqual(body, body_parts[body_part++]); <ide> }; <ide> <ide> function expectBody(expected) { <ide> let expected_body = '123123456123456789123456789ABC123456789ABCDEF'; <ide> <ide> const onBody = (buf, start, len) => { <del> const chunk = '' + buf.slice(start, start + len); <add> const chunk = String(buf.slice(start, start + len)); <ide> assert.strictEqual(expected_body.indexOf(chunk), 0); <ide> expected_body = expected_body.slice(chunk.length); <ide> }; <ide> function expectBody(expected) { <ide> <ide> for (let i = 1; i < request.length - 1; ++i) { <ide> const a = request.slice(0, i); <del> console.error('request.slice(0, ' + i + ') = ', <del> JSON.stringify(a.toString())); <add> console.error(`request.slice(0, ${i}) = ${JSON.stringify(a.toString())}`); <ide> const b = request.slice(i); <del> console.error('request.slice(' + i + ') = ', <del> JSON.stringify(b.toString())); <add> console.error(`request.slice(${i}) = ${JSON.stringify(b.toString())}`); <ide> test(a, b); <ide> } <ide> } <ide> function expectBody(expected) { <ide> let expected_body = '123123456123456789123456789ABC123456789ABCDEF'; <ide> <ide> const onBody = (buf, start, len) => { <del> const chunk = '' + buf.slice(start, start + len); <add> const chunk = String(buf.slice(start, start + len)); <ide> assert.strictEqual(expected_body.indexOf(chunk), 0); <ide> expected_body = expected_body.slice(chunk.length); <ide> }; <ide> function expectBody(expected) { <ide> // Test parser 'this' safety <ide> // https://github.com/joyent/node/issues/6690 <ide> assert.throws(function() { <del> const request = Buffer.from( <del> 'GET /hello HTTP/1.1' + CRLF + <del> CRLF); <add> const request = Buffer.from(`GET /hello HTTP/1.1${CRLF}${CRLF}`); <ide> <ide> const parser = newParser(REQUEST); <ide> const notparser = { execute: parser.execute }; <ide><path>test/parallel/test-http-pipe-fs.js <ide> const server = http.createServer(common.mustCall(function(req, res) { <ide> } <ide> }, function(res) { <ide> res.on('end', function() { <del> console.error('res' + i + ' end'); <add> console.error(`res${i} end`); <ide> if (i === 2) { <ide> server.close(); <ide> } <ide> }); <ide> res.resume(); <ide> }); <ide> req.on('socket', function(s) { <del> console.error('req' + i + ' start'); <add> console.error(`req${i} start`); <ide> }); <ide> req.end('12345'); <ide> }(i + 1)); <ide><path>test/parallel/test-http-proxy.js <ide> const backend = http.createServer(function(req, res) { <ide> }); <ide> <ide> const proxy = http.createServer(function(req, res) { <del> console.error('proxy req headers: ' + JSON.stringify(req.headers)); <add> console.error(`proxy req headers: ${JSON.stringify(req.headers)}`); <ide> http.get({ <ide> port: backend.address().port, <ide> path: url.parse(req.url).pathname <ide> }, function(proxy_res) { <ide> <del> console.error('proxy res headers: ' + JSON.stringify(proxy_res.headers)); <add> console.error(`proxy res headers: 
${JSON.stringify(proxy_res.headers)}`); <ide> <ide> assert.strictEqual('world', proxy_res.headers['hello']); <ide> assert.strictEqual('text/plain', proxy_res.headers['content-type']); <ide><path>test/parallel/test-http-request-methods.js <ide> const http = require('http'); <ide> c.setEncoding('utf8'); <ide> <ide> c.on('connect', function() { <del> c.write(method + ' / HTTP/1.0\r\n\r\n'); <add> c.write(`${method} / HTTP/1.0\r\n\r\n`); <ide> }); <ide> <ide> c.on('data', function(chunk) { <ide><path>test/parallel/test-http-response-no-headers.js <ide> const expected = { <ide> <ide> function test(httpVersion, callback) { <ide> const server = net.createServer(function(conn) { <del> const reply = 'HTTP/' + httpVersion + ' 200 OK\r\n\r\n' + <del> expected[httpVersion]; <add> const reply = `HTTP/${httpVersion} 200 OK\r\n\r\n${expected[httpVersion]}`; <ide> <ide> conn.end(reply); <ide> }); <ide><path>test/parallel/test-http-response-status-message.js <ide> function runTest(testCaseIndex) { <ide> port: server.address().port, <ide> path: testCase.path <ide> }, function(response) { <del> console.log('client: expected status message: ' + testCase.statusMessage); <del> console.log('client: actual status message: ' + response.statusMessage); <add> console.log(`client: expected status message: ${testCase.statusMessage}`); <add> console.log(`client: actual status message: ${response.statusMessage}`); <ide> assert.strictEqual(testCase.statusMessage, response.statusMessage); <ide> <ide> response.on('end', function() { <ide><path>test/parallel/test-http-server-multiheaders2.js <ide> const multipleForbidden = [ <ide> <ide> const srv = http.createServer(function(req, res) { <ide> multipleForbidden.forEach(function(header) { <del> assert.strictEqual(req.headers[header.toLowerCase()], <del> 'foo', 'header parsed incorrectly: ' + header); <add> assert.strictEqual(req.headers[header.toLowerCase()], 'foo', <add> `header parsed incorrectly: ${header}`); <ide> }); <ide> multipleAllowed.forEach(function(header) { <ide> const sep = (header.toLowerCase() === 'cookie' ? '; ' : ', '); <del> assert.strictEqual(req.headers[header.toLowerCase()], <del> 'foo' + sep + 'bar', <del> 'header parsed incorrectly: ' + header); <add> assert.strictEqual(req.headers[header.toLowerCase()], `foo${sep}bar`, <add> `header parsed incorrectly: ${header}`); <ide> }); <ide> <ide> res.writeHead(200, {'Content-Type': 'text/plain'}); <ide><path>test/parallel/test-http-should-keep-alive.js <ide> const server = net.createServer(function(socket) { <ide> }).listen(0, function() { <ide> function makeRequest() { <ide> const req = http.get({port: server.address().port}, function(res) { <del> assert.strictEqual(req.shouldKeepAlive, SHOULD_KEEP_ALIVE[responses], <del> SERVER_RESPONSES[responses] + ' should ' + <del> (SHOULD_KEEP_ALIVE[responses] ? '' : 'not ') + <del> 'Keep-Alive'); <add> assert.strictEqual( <add> req.shouldKeepAlive, SHOULD_KEEP_ALIVE[responses], <add> `${SERVER_RESPONSES[responses]} should ${ <add> SHOULD_KEEP_ALIVE[responses] ? 
'' : 'not '}Keep-Alive`); <ide> ++responses; <ide> if (responses < SHOULD_KEEP_ALIVE.length) { <ide> makeRequest(); <ide><path>test/parallel/test-http-status-code.js <ide> let testIdx = 0; <ide> const s = http.createServer(function(req, res) { <ide> const t = tests[testIdx]; <ide> res.writeHead(t, {'Content-Type': 'text/plain'}); <del> console.log('--\nserver: statusCode after writeHead: ' + res.statusCode); <add> console.log(`--\nserver: statusCode after writeHead: ${res.statusCode}`); <ide> assert.strictEqual(res.statusCode, t); <ide> res.end('hello world\n'); <ide> }); <ide> function nextTest() { <ide> const test = tests[testIdx]; <ide> <ide> http.get({ port: s.address().port }, function(response) { <del> console.log('client: expected status: ' + test); <del> console.log('client: statusCode: ' + response.statusCode); <add> console.log(`client: expected status: ${test}`); <add> console.log(`client: statusCode: ${response.statusCode}`); <ide> assert.strictEqual(response.statusCode, test); <ide> response.on('end', function() { <ide> testsComplete++; <ide><path>test/parallel/test-http-upgrade-agent.js <ide> srv.listen(0, '127.0.0.1', common.mustCall(function() { <ide> 'upgrade': 'websocket' <ide> } <ide> }; <del> const name = options.host + ':' + options.port; <add> const name = `${options.host}:${options.port}`; <ide> <ide> const req = http.request(options); <ide> req.end(); <ide><path>test/parallel/test-http-url.parse-basic.js <ide> function check(request) { <ide> assert.strictEqual(request.url, '/'); <ide> // the host header should use the url.parse.hostname <ide> assert.strictEqual(request.headers.host, <del> testURL.hostname + ':' + testURL.port); <add> `${testURL.hostname}:${testURL.port}`); <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <ide><path>test/parallel/test-http-url.parse-https.request.js <ide> const fs = require('fs'); <ide> <ide> // https options <ide> const httpsOptions = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> function check(request) { <ide><path>test/parallel/test-http-url.parse-post.js <ide> function check(request) { <ide> assert.strictEqual(request.url, '/asdf?qwer=zxcv'); <ide> //the host header should use the url.parse.hostname <ide> assert.strictEqual(request.headers.host, <del> testURL.hostname + ':' + testURL.port); <add> `${testURL.hostname}:${testURL.port}`); <ide> } <ide> <ide> const server = http.createServer(function(request, response) { <ide><path>test/parallel/test-http-write-empty-string.js <ide> const assert = require('assert'); <ide> const http = require('http'); <ide> <ide> const server = http.createServer(function(request, response) { <del> console.log('responding to ' + request.url); <add> console.log(`responding to ${request.url}`); <ide> <ide> response.writeHead(200, {'Content-Type': 'text/plain'}); <ide> response.write('1\n'); <ide><path>test/parallel/test-http.js <ide> const server = http.Server(common.mustCall(function(req, res) { <ide> <ide> req.on('end', function() { <ide> res.writeHead(200, {'Content-Type': 'text/plain'}); <del> res.write('The path was ' + url.parse(req.url).pathname); <add> res.write(`The path was ${url.parse(req.url).pathname}`); <ide> res.end(); <ide> }); <ide> req.resume(); 
<ide><path>test/parallel/test-https-agent-create-connection.js <ide> const agent = new https.Agent(); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> }; <ide> <ide> const expectedHeader = /^HTTP\/1\.1 200 OK/; <ide><path>test/parallel/test-https-agent-disable-session-reuse.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const clientSessions = []; <ide><path>test/parallel/test-https-agent-secure-protocol.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <del> ca: fs.readFileSync(common.fixturesDir + '/keys/ca1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <add> ca: fs.readFileSync(`${common.fixturesDir}/keys/ca1-cert.pem`) <ide> }; <ide> <ide> const server = https.Server(options, function(req, res) { <ide><path>test/parallel/test-https-agent-servername.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <del> ca: fs.readFileSync(common.fixturesDir + '/keys/ca1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <add> ca: fs.readFileSync(`${common.fixturesDir}/keys/ca1-cert.pem`) <ide> }; <ide> <ide> <ide><path>test/parallel/test-https-agent-session-eviction.js <ide> const fs = require('fs'); <ide> const SSL_OP_NO_TICKET = require('crypto').constants.SSL_OP_NO_TICKET; <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> secureOptions: SSL_OP_NO_TICKET <ide> }; <ide> <ide><path>test/parallel/test-https-agent-session-reuse.js <ide> const crypto = require('crypto'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <del>const ca = fs.readFileSync(common.fixturesDir + '/keys/ca1-cert.pem'); <add>const ca = fs.readFileSync(`${common.fixturesDir}/keys/ca1-cert.pem`); <ide> <ide> const clientSessions = {}; <ide> let 
serverRequests = 0; <ide><path>test/parallel/test-https-agent-sni.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const TOTAL = 4; <ide> server.listen(0, function() { <ide> function expectResponse(id) { <ide> return common.mustCall(function(res) { <ide> res.resume(); <del> assert.strictEqual(res.headers['x-sni'], 'sni.' + id); <add> assert.strictEqual(res.headers['x-sni'], `sni.${id}`); <ide> }); <ide> } <ide> <ide> server.listen(0, function() { <ide> path: '/', <ide> port: this.address().port, <ide> host: '127.0.0.1', <del> servername: 'sni.' + j, <add> servername: `sni.${j}`, <ide> rejectUnauthorized: false <ide> }, expectResponse(j)); <ide> } <ide><path>test/parallel/test-https-agent-sockets-leak.js <ide> const https = require('https'); <ide> const assert = require('assert'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <del> ca: fs.readFileSync(common.fixturesDir + '/keys/ca1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <add> ca: fs.readFileSync(`${common.fixturesDir}/keys/ca1-cert.pem`) <ide> }; <ide> <ide> const server = https.Server(options, common.mustCall((req, res) => { <ide><path>test/parallel/test-https-agent.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> <ide><path>test/parallel/test-https-byteswritten.js <ide> if (!common.hasCrypto) { <ide> const https = require('https'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const body = 'hello world\n'; <ide><path>test/parallel/test-https-client-get-url.js <ide> const url = require('url'); <ide> const URL = url.URL; <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = https.createServer(options, common.mustCall(function(req, res) { <ide><path>test/parallel/test-https-client-resume.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: 
fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> // create server <ide><path>test/parallel/test-https-close.js <ide> if (!common.hasCrypto) { <ide> const https = require('https'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const connections = {}; <ide> const server = https.createServer(options, function(req, res) { <ide> }); <ide> <ide> server.on('connection', function(connection) { <del> const key = connection.remoteAddress + ':' + connection.remotePort; <add> const key = `${connection.remoteAddress}:${connection.remotePort}`; <ide> connection.on('close', function() { <ide> delete connections[key]; <ide> }); <ide><path>test/parallel/test-https-drain.js <ide> server.listen(0, function() { <ide> return process.nextTick(send); <ide> } <ide> sent += bufSize; <del> console.error('sent: ' + sent); <add> console.error(`sent: ${sent}`); <ide> resumed = true; <ide> res.resume(); <ide> console.error('resumed'); <ide> server.listen(0, function() { <ide> } <ide> received += data.length; <ide> if (received >= sent) { <del> console.error('received: ' + received); <add> console.error(`received: ${received}`); <ide> req.end(); <ide> server.close(); <ide> } <ide><path>test/parallel/test-https-eof-for-eom.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> <ide><path>test/parallel/test-https-foafssl.js <ide> if (!common.hasCrypto) { <ide> const https = require('https'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/agent.key'), <del> cert: fs.readFileSync(common.fixturesDir + '/agent.crt'), <add> key: fs.readFileSync(`${common.fixturesDir}/agent.key`), <add> cert: fs.readFileSync(`${common.fixturesDir}/agent.crt`), <ide> requestCert: true, <ide> rejectUnauthorized: false <ide> }; <ide><path>test/parallel/test-https-host-headers.js <ide> const https = require('https'); <ide> <ide> const fs = require('fs'); <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> const httpsServer = https.createServer(options, reqHandler); <ide> <ide> function reqHandler(req, res) { <del> console.log('Got request: ' + req.headers.host + ' ' + req.url); <add> console.log(`Got request: ${req.headers.host} ${req.url}`); <ide> if (req.url === '/setHostFalse5') { <ide> assert.strictEqual(req.headers.host, undefined); <ide> } else { <del> assert.strictEqual(req.headers.host, `localhost:${this.address().port}`, <del> 'Wrong host header for req[' + req.url + ']: ' + <del> req.headers.host); <add> assert.strictEqual( <add> req.headers.host, 
`localhost:${this.address().port}`, <add> `Wrong host header for req[${req.url}]: ${req.headers.host}`); <ide> } <ide> res.writeHead(200, {}); <ide> //process.nextTick(function() { res.end('ok'); }); <ide> function testHttps() { <ide> <ide> function cb(res) { <ide> counter--; <del> console.log('back from https request. counter = ' + counter); <add> console.log(`back from https request. counter = ${counter}`); <ide> if (counter === 0) { <ide> httpsServer.close(); <ide> console.log('ok'); <ide> function testHttps() { <ide> assert.ifError(er); <ide> https.get({ <ide> method: 'GET', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> //agent: false, <ide> port: this.address().port, <ide> function testHttps() { <ide> <ide> https.request({ <ide> method: 'GET', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> //agent: false, <ide> port: this.address().port, <ide> function testHttps() { <ide> <ide> https.request({ <ide> method: 'POST', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> //agent: false, <ide> port: this.address().port, <ide> function testHttps() { <ide> <ide> https.request({ <ide> method: 'PUT', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> //agent: false, <ide> port: this.address().port, <ide> function testHttps() { <ide> <ide> https.request({ <ide> method: 'DELETE', <del> path: '/' + (counter++), <add> path: `/${counter++}`, <ide> host: 'localhost', <ide> //agent: false, <ide> port: this.address().port, <ide> function testHttps() { <ide> <ide> https.get({ <ide> method: 'GET', <del> path: '/setHostFalse' + (counter++), <add> path: `/setHostFalse${counter++}`, <ide> host: 'localhost', <ide> setHost: false, <ide> port: this.address().port, <ide><path>test/parallel/test-https-localaddress-bind-error.js <ide> if (!common.hasCrypto) { <ide> const https = require('https'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const invalidLocalAddress = '1.2.3.4'; <ide> <ide> const server = https.createServer(options, function(req, res) { <del> console.log('Connect from: ' + req.connection.remoteAddress); <add> console.log(`Connect from: ${req.connection.remoteAddress}`); <ide> <ide> req.on('end', function() { <ide> res.writeHead(200, { 'Content-Type': 'text/plain' }); <del> res.end('You are from: ' + req.connection.remoteAddress); <add> res.end(`You are from: ${req.connection.remoteAddress}`); <ide> }); <ide> req.resume(); <ide> }); <ide> server.listen(0, '127.0.0.1', common.mustCall(function() { <ide> }, function(res) { <ide> assert.fail('unexpectedly got response from server'); <ide> }).on('error', common.mustCall(function(e) { <del> console.log('client got error: ' + e.message); <add> console.log(`client got error: ${e.message}`); <ide> server.close(); <ide> })).end(); <ide> })); <ide><path>test/parallel/test-https-localaddress.js <ide> if (!common.hasMultiLocalhost()) { <ide> } <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: 
fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = https.createServer(options, function(req, res) { <del> console.log('Connect from: ' + req.connection.remoteAddress); <add> console.log(`Connect from: ${req.connection.remoteAddress}`); <ide> assert.strictEqual('127.0.0.2', req.connection.remoteAddress); <ide> <ide> req.on('end', function() { <ide> res.writeHead(200, { 'Content-Type': 'text/plain' }); <del> res.end('You are from: ' + req.connection.remoteAddress); <add> res.end(`You are from: ${req.connection.remoteAddress}`); <ide> }); <ide> req.resume(); <ide> }); <ide><path>test/parallel/test-https-pfx.js <ide> if (!common.hasCrypto) { <ide> } <ide> const https = require('https'); <ide> <del>const pfx = fs.readFileSync(common.fixturesDir + '/test_cert.pfx'); <add>const pfx = fs.readFileSync(`${common.fixturesDir}/test_cert.pfx`); <ide> <ide> const options = { <ide> host: '127.0.0.1', <ide><path>test/parallel/test-https-req-split.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> // Force splitting incoming data <ide> tls.SLAB_BUFFER_SIZE = 1; <ide> const server = https.createServer(options); <ide> server.on('upgrade', common.mustCall(function(req, socket, upgrade) { <ide> socket.on('data', function(data) { <del> throw new Error('Unexpected data: ' + data); <add> throw new Error(`Unexpected data: ${data}`); <ide> }); <ide> socket.end('HTTP/1.1 200 Ok\r\n\r\n'); <ide> })); <ide><path>test/parallel/test-https-resume-after-renew.js <ide> const https = require('https'); <ide> const crypto = require('crypto'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <del> ca: fs.readFileSync(common.fixturesDir + '/keys/ca1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <add> ca: fs.readFileSync(`${common.fixturesDir}/keys/ca1-cert.pem`) <ide> }; <ide> <ide> const server = https.createServer(options, function(req, res) { <ide><path>test/parallel/test-https-simple.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const tests = 2; <ide><path>test/parallel/test-https-socket-options.js <ide> const fs = require('fs'); <ide> const http = require('http'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const body = 'hello world\n'; <ide><path>test/parallel/test-https-strict.js <ide> function makeReq(path, port, error, 
host, ca) { <ide> port === server2.address().port ? server2 : <ide> port === server3.address().port ? server3 : <ide> null; <del> if (!server) throw new Error('invalid port: ' + port); <add> if (!server) throw new Error(`invalid port: ${port}`); <ide> server.expectCount++; <ide> <ide> req.on('response', common.mustCall((res) => { <ide><path>test/parallel/test-https-timeout-server-2.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = https.createServer(options, common.mustNotCall()); <ide><path>test/parallel/test-https-timeout-server.js <ide> const net = require('net'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> handshakeTimeout: 50 <ide> }; <ide> <ide><path>test/parallel/test-https-timeout.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> // a server that never replies <ide><path>test/parallel/test-https-truncate.js <ide> const https = require('https'); <ide> <ide> const fs = require('fs'); <ide> <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`); <ide> <ide> // number of bytes discovered empirically to trigger the bug <ide> const data = Buffer.alloc(1024 * 32 + 1); <ide><path>test/parallel/test-intl.js <ide> assert.strictEqual('ç'.toUpperCase(), 'Ç'); <ide> <ide> if (!common.hasIntl) { <ide> const erMsg = <del> '"Intl" object is NOT present but v8_enable_i18n_support is ' + <del> enablei18n; <add> `"Intl" object is NOT present but v8_enable_i18n_support is ${enablei18n}`; <ide> assert.strictEqual(enablei18n, 0, erMsg); <ide> common.skip('Intl tests because Intl object not present.'); <ide> <ide> } else { <ide> const erMsg = <del> '"Intl" object is present but v8_enable_i18n_support is ' + <del> enablei18n + <del> '. Is this test out of date?'; <add> `"Intl" object is present but v8_enable_i18n_support is ${ <add> enablei18n}. Is this test out of date?`; <ide> assert.strictEqual(enablei18n, 1, erMsg); <ide> <ide> // Construct a new date at the beginning of Unix time <ide> if (!common.hasIntl) { <ide> common.skip('detailed Intl tests because English is not ' + <ide> 'listed as supported.'); <ide> // Smoke test. Does it format anything, or fail? 
<del> console.log('Date(0) formatted to: ' + dtf.format(date0)); <add> console.log(`Date(0) formatted to: ${dtf.format(date0)}`); <ide> return; <ide> } <ide> <ide><path>test/parallel/test-module-globalpaths-nodepath.js <ide> const partC = ''; <ide> if (common.isWindows) { <ide> partA = 'C:\\Users\\Rocko Artischocko\\AppData\\Roaming\\npm'; <ide> partB = 'C:\\Program Files (x86)\\nodejs\\'; <del> process.env['NODE_PATH'] = partA + ';' + partB + ';' + partC; <add> process.env['NODE_PATH'] = `${partA};${partB};${partC}`; <ide> } else { <ide> partA = '/usr/test/lib/node_modules'; <ide> partB = '/usr/test/lib/node'; <del> process.env['NODE_PATH'] = partA + ':' + partB + ':' + partC; <add> process.env['NODE_PATH'] = `${partA}:${partB}:${partC}`; <ide> } <ide> <ide> mod._initPaths(); <ide><path>test/parallel/test-module-loading-globalpaths.js <ide> if (process.argv[2] === 'child') { <ide> child_process.execFileSync(testExecPath, [ __filename, 'child' ], <ide> { encoding: 'utf8', env: env }); <ide> }, <del> new RegExp('Cannot find module \'' + pkgName + '\'')); <add> new RegExp(`Cannot find module '${pkgName}'`)); <ide> <ide> // Test module in $HOME/.node_modules. <ide> const modHomeDir = path.join(testFixturesDir, 'home-pkg-in-node_modules'); <ide> if (process.argv[2] === 'child') { <ide> fs.mkdirSync(prefixLibPath); <ide> const prefixLibNodePath = path.join(prefixLibPath, 'node'); <ide> fs.mkdirSync(prefixLibNodePath); <del> const pkgPath = path.join(prefixLibNodePath, pkgName + '.js'); <del> fs.writeFileSync(pkgPath, 'exports.string = \'' + expectedString + '\';'); <add> const pkgPath = path.join(prefixLibNodePath, `${pkgName}.js`); <add> fs.writeFileSync(pkgPath, `exports.string = '${expectedString}';`); <ide> <ide> env['HOME'] = env['USERPROFILE'] = noPkgHomeDir; <ide> runTest(expectedString, env); <ide><path>test/parallel/test-module-nodemodulepaths.js <ide> node-gyp/node_modules/glob/node_modules', <ide> const platformCases = common.isWindows ? cases.WIN : cases.POSIX; <ide> platformCases.forEach((c) => { <ide> const paths = _module._nodeModulePaths(c.file); <del> assert.deepStrictEqual(c.expect, paths, 'case ' + c.file + <del> ' failed, actual paths is ' + JSON.stringify(paths)); <add> assert.deepStrictEqual( <add> c.expect, paths, <add> `case ${c.file} failed, actual paths is ${JSON.stringify(paths)}`); <ide> }); <ide><path>test/parallel/test-module-require-depth.js <ide> const internalModule = require('internal/module'); <ide> <ide> // Module one loads two too so the expected depth for two is, well, two. 
<ide> assert.strictEqual(internalModule.requireDepth, 0); <del>const one = require(common.fixturesDir + '/module-require-depth/one'); <del>const two = require(common.fixturesDir + '/module-require-depth/two'); <add>const one = require(`${common.fixturesDir}/module-require-depth/one`); <add>const two = require(`${common.fixturesDir}/module-require-depth/two`); <ide> assert.deepStrictEqual(one, { requireDepth: 1 }); <ide> assert.deepStrictEqual(two, { requireDepth: 2 }); <ide> assert.strictEqual(internalModule.requireDepth, 0); <ide><path>test/parallel/test-net-better-error-messages-path.js <ide> c.on('connect', common.mustNotCall()); <ide> <ide> c.on('error', common.mustCall(function(e) { <ide> assert.strictEqual(e.code, 'ENOENT'); <del> assert.strictEqual(e.message, 'connect ENOENT ' + fp); <add> assert.strictEqual(e.message, `connect ENOENT ${fp}`); <ide> })); <ide><path>test/parallel/test-net-bytes-stats.js <ide> const tcp = net.Server(function(s) { <ide> <ide> s.on('end', function() { <ide> bytesRead += s.bytesRead; <del> console.log('tcp socket disconnect #' + count); <add> console.log(`tcp socket disconnect #${count}`); <ide> }); <ide> }); <ide> <ide> tcp.listen(0, function doTest() { <ide> <ide> socket.on('close', function() { <ide> console.error('CLIENT close event #%d', count); <del> console.log('Bytes read: ' + bytesRead); <del> console.log('Bytes written: ' + bytesWritten); <add> console.log(`Bytes read: ${bytesRead}`); <add> console.log(`Bytes written: ${bytesWritten}`); <ide> if (count < 2) { <ide> console.error('RECONNECTING'); <ide> socket.connect(tcp.address().port); <ide><path>test/parallel/test-net-connect-buffer.js <ide> const tcp = net.Server(function(s) { <ide> }); <ide> <ide> s.on('error', function(e) { <del> console.log('tcp server-side error: ' + e.message); <add> console.log(`tcp server-side error: ${e.message}`); <ide> process.exit(1); <ide> }); <ide> }); <ide> tcp.listen(0, function() { <ide> connectHappened = true; <ide> }); <ide> <del> console.log('connecting = ' + socket.connecting); <add> console.log(`connecting = ${socket.connecting}`); <ide> <ide> assert.strictEqual('opening', socket.readyState); <ide> <ide><path>test/parallel/test-net-connect-options-allowhalfopen.js <ide> const forAllClients = (cb) => common.mustCall(cb, CLIENT_VARIANTS); <ide> console.error(`No. ${index} client received FIN`); <ide> assert(!client.readable); <ide> assert(client.writable); <del> assert(client.write(index + '')); <add> assert(client.write(String(index))); <ide> client.end(); <ide> clientSentFIN++; <ide> console.error(`No. 
${index} client sent FIN, ` + <ide><path>test/parallel/test-net-connect-options-fd.js <ide> const forAllClients = (cb) => common.mustCall(cb, CLIENT_VARIANTS); <ide> }) <ide> .on('error', function(err) { <ide> console.error(err); <del> assert.fail(null, null, '[Pipe server]' + err); <add> assert.fail(null, null, `[Pipe server]${err}`); <ide> }) <ide> .listen({path: serverPath}, common.mustCall(function serverOnListen() { <ide> const getSocketOpt = (index) => { <ide> const handle = new Pipe(); <ide> const err = handle.bind(`${prefix}-client-${socketCounter++}`); <del> assert(err >= 0, '' + err); <add> assert(err >= 0, String(err)); <ide> assert.notStrictEqual(handle.fd, -1); <ide> handleMap.set(index, handle); <ide> console.error(`[Pipe]Bound handle with Pipe ${handle.fd}`); <ide> const forAllClients = (cb) => common.mustCall(cb, CLIENT_VARIANTS); <ide> assert(handleMap.has(index)); <ide> const oldHandle = handleMap.get(index); <ide> assert.strictEqual(oldHandle.fd, this._handle.fd); <del> client.write(oldHandle.fd + ''); <add> client.write(String(oldHandle.fd)); <ide> console.error(`[Pipe]Sending data through fd ${oldHandle.fd}`); <ide> client.on('error', function(err) { <ide> console.error(err); <del> assert.fail(null, null, '[Pipe Client]' + err); <add> assert.fail(null, null, `[Pipe Client]${err}`); <ide> }); <ide> }); <ide> <ide><path>test/parallel/test-net-connect-options-port.js <ide> const net = require('net'); <ide> <ide> // Total connections = 3 * 4(canConnect) * 6(doConnect) = 72 <ide> canConnect(port); <del> canConnect(port + ''); <del> canConnect('0x' + port.toString(16)); <add> canConnect(String(port)); <add> canConnect(`0x${port.toString(16)}`); <ide> })); <ide> <ide> // Try connecting to random ports, but do so once the server is closed <ide> function canConnect(port) { <ide> function asyncFailToConnect(port) { <ide> const onError = () => common.mustCall(function(err) { <ide> const regexp = /^Error: connect (E\w+)(.+)$/; <del> assert(regexp.test(err + ''), err + ''); <add> assert(regexp.test(String(err)), String(err)); <ide> }); <ide> <ide> const dont = () => common.mustNotCall(); <ide><path>test/parallel/test-net-internal.js <ide> const isLegalPort = require('internal/net').isLegalPort; <ide> <ide> for (let n = 0; n <= 0xFFFF; n++) { <ide> assert(isLegalPort(n)); <del> assert(isLegalPort('' + n)); <add> assert(isLegalPort(String(n))); <ide> assert(`0x${n.toString(16)}`); <ide> assert(`0o${n.toString(8)}`); <ide> assert(`0b${n.toString(2)}`); <ide><path>test/parallel/test-net-listen-shared-ports.js <ide> if (cluster.isMaster) { <ide> <ide> server1.on('error', function(err) { <ide> // no errors expected <del> process.send('server1:' + err.code); <add> process.send(`server1:${err.code}`); <ide> }); <ide> <ide> server2.on('error', function(err) { <ide> // an error is expected on the second worker <del> process.send('server2:' + err.code); <add> process.send(`server2:${err.code}`); <ide> }); <ide> <ide> server1.listen({ <ide><path>test/parallel/test-net-pipe-connect-errors.js <ide> if (common.isWindows) { <ide> // on CI for a POSIX socket. Even though this isn't actually a socket file, <ide> // the error will be different from the one we are expecting if we exceed the <ide> // limit. 
<del> emptyTxt = common.tmpDir + '0.txt'; <add> emptyTxt = `${common.tmpDir}0.txt`; <ide> <ide> function cleanup() { <ide> try { <ide><path>test/parallel/test-net-reconnect-error.js <ide> const c = net.createConnection(common.PORT); <ide> c.on('connect', common.mustNotCall('client should not have connected')); <ide> <ide> c.on('error', function(e) { <del> console.error('CLIENT error: ' + e.code); <add> console.error(`CLIENT error: ${e.code}`); <ide> client_error_count++; <ide> assert.strictEqual('ECONNREFUSED', e.code); <ide> }); <ide><path>test/parallel/test-net-reconnect.js <ide> const server = net.createServer(function(socket) { <ide> }); <ide> <ide> socket.on('close', function(had_error) { <del> console.log('SERVER had_error: ' + JSON.stringify(had_error)); <add> console.log(`SERVER had_error: ${JSON.stringify(had_error)}`); <ide> assert.strictEqual(false, had_error); <ide> }); <ide> }); <ide> server.listen(0, function() { <ide> <ide> client.on('data', function(chunk) { <ide> client_recv_count += 1; <del> console.log('client_recv_count ' + client_recv_count); <add> console.log(`client_recv_count ${client_recv_count}`); <ide> assert.strictEqual('hello\r\n', chunk); <ide> console.error('CLIENT: calling end', client._writableState); <ide> client.end(); <ide><path>test/parallel/test-net-server-bind.js <ide> process.on('exit', function() { <ide> let expectedConnectionKey1; <ide> <ide> if (address1.family === 'IPv6') <del> expectedConnectionKey1 = '6::::' + address1.port; <add> expectedConnectionKey1 = `6::::${address1.port}`; <ide> else <del> expectedConnectionKey1 = '4:0.0.0.0:' + address1.port; <add> expectedConnectionKey1 = `4:0.0.0.0:${address1.port}`; <ide> <ide> assert.strictEqual(connectionKey1, expectedConnectionKey1); <ide> assert.strictEqual(common.PORT + 1, address2.port); <ide><path>test/parallel/test-net-server-listen-handle.js <ide> let counter = 0; <ide> <ide> // Avoid conflict with listen-path <ide> function randomPipePath() { <del> return common.PIPE + '-listen-handle-' + (counter++); <add> return `${common.PIPE}-listen-handle-${counter++}`; <ide> } <ide> <ide> function randomHandle(type) { <ide> if (!common.isWindows) { // Windows doesn't support {fd: <n>} <ide> net.createServer() <ide> .listen({fd: fd}, common.mustNotCall()) <ide> .on('error', common.mustCall(function(err) { <del> assert.strictEqual(err + '', 'Error: listen EINVAL'); <add> assert.strictEqual(String(err), 'Error: listen EINVAL'); <ide> this.close(); <ide> })); <ide> } <ide><path>test/parallel/test-net-server-listen-path.js <ide> let counter = 0; <ide> <ide> // Avoid conflict with listen-handle <ide> function randomPipePath() { <del> return common.PIPE + '-listen-path-' + (counter++); <add> return `${common.PIPE}-listen-path-${counter++}`; <ide> } <ide> <ide> // Test listen(path) <ide><path>test/parallel/test-net-server-max-connections-close-makes-more-available.js <ide> const received = []; <ide> const sent = []; <ide> <ide> function createConnection(index) { <del> console.error('creating connection ' + index); <add> console.error(`creating connection ${index}`); <ide> <ide> return new Promise(function(resolve, reject) { <ide> const connection = net.createConnection(server.address().port, function() { <del> const msg = '' + index; <del> console.error('sending message: ' + msg); <add> const msg = String(index); <add> console.error(`sending message: ${msg}`); <ide> this.write(msg); <ide> sent.push(msg); <ide> }); <ide> function createConnection(index) { <ide> }); <ide> <ide> connection.on('data', function(e) 
{ <del> console.error('connection ' + index + ' received response'); <add> console.error(`connection ${index} received response`); <ide> resolve(); <ide> }); <ide> <ide> connection.on('end', function() { <del> console.error('ending ' + index); <add> console.error(`ending ${index}`); <ide> resolve(); <ide> }); <ide> <ide> function createConnection(index) { <ide> } <ide> <ide> function closeConnection(index) { <del> console.error('closing connection ' + index); <add> console.error(`closing connection ${index}`); <ide> return new Promise(function(resolve, reject) { <ide> connections[index].on('end', function() { <ide> resolve(); <ide> function closeConnection(index) { <ide> <ide> const server = net.createServer(function(socket) { <ide> socket.on('data', function(data) { <del> console.error('received message: ' + data); <del> received.push('' + data); <add> console.error(`received message: ${data}`); <add> received.push(String(data)); <ide> socket.write('acknowledged'); <ide> }); <ide> }); <ide><path>test/parallel/test-next-tick-ordering.js <ide> const done = []; <ide> <ide> function get_printer(timeout) { <ide> return function() { <del> console.log('Running from setTimeout ' + timeout); <add> console.log(`Running from setTimeout ${timeout}`); <ide> done.push(timeout); <ide> }; <ide> } <ide><path>test/parallel/test-npm-install.js <ide> const args = [ <ide> <ide> const pkgContent = JSON.stringify({ <ide> dependencies: { <del> 'package-name': common.fixturesDir + '/packages/main' <add> 'package-name': `${common.fixturesDir}/packages/main` <ide> } <ide> }); <ide> <ide> function handleExit(code, signalCode) { <ide> assert.strictEqual(code, 0, `npm install got error code ${code}`); <ide> assert.strictEqual(signalCode, null, `unexpected signal: ${signalCode}`); <ide> assert.doesNotThrow(function() { <del> fs.accessSync(installDir + '/node_modules/package-name'); <add> fs.accessSync(`${installDir}/node_modules/package-name`); <ide> }); <ide> } <ide> <ide><path>test/parallel/test-openssl-ca-options.js <ide> const result = childProcess.spawnSync(process.execPath, [ <ide> '-p', 'process.version'], <ide> {encoding: 'utf8'}); <ide> <del>assert.strictEqual(result.stderr, <del> process.execPath + ': either --use-openssl-ca or ' + <del> '--use-bundled-ca can be used, not both' + os.EOL); <add>assert.strictEqual(result.stderr, `${process.execPath <add> }: either --use-openssl-ca or --use-bundled-ca can be used, not both${os.EOL}` <add>); <ide> assert.strictEqual(result.status, 9); <ide> <ide> const useBundledCA = childProcess.spawnSync(process.execPath, [ <ide><path>test/parallel/test-os.js <ide> if (common.isWindows) { <ide> process.env.TEMP = ''; <ide> assert.strictEqual(os.tmpdir(), '/tmp'); <ide> process.env.TMP = ''; <del> const expected = (process.env.SystemRoot || process.env.windir) + '\\temp'; <add> const expected = `${process.env.SystemRoot || process.env.windir}\\temp`; <ide> assert.strictEqual(os.tmpdir(), expected); <ide> process.env.TEMP = '\\temp\\'; <ide> assert.strictEqual(os.tmpdir(), '\\temp'); <ide><path>test/parallel/test-path-makelong.js <ide> if (common.isWindows) { <ide> const file = path.join(common.fixturesDir, 'a.js'); <ide> const resolvedFile = path.resolve(file); <ide> <del> assert.strictEqual('\\\\?\\' + resolvedFile, path._makeLong(file)); <del> assert.strictEqual('\\\\?\\' + resolvedFile, path._makeLong('\\\\?\\' + <del> file)); <add> assert.strictEqual(`\\\\?\\${resolvedFile}`, path._makeLong(file)); <add> assert.strictEqual(`\\\\?\\${resolvedFile}`, <add> 
path._makeLong(`\\\\?\\${file}`)); <ide> assert.strictEqual('\\\\?\\UNC\\someserver\\someshare\\somefile', <ide> path._makeLong('\\\\someserver\\someshare\\somefile')); <ide> assert.strictEqual('\\\\?\\UNC\\someserver\\someshare\\somefile', path <ide><path>test/parallel/test-path-parse-format.js <ide> trailingTests.forEach(function(test) { <ide> test[1].forEach(function(test) { <ide> const actual = parse(test[0]); <ide> const expected = test[1]; <del> const fn = `path.${os}.parse(`; <del> const message = fn + <del> JSON.stringify(test[0]) + <del> ')' + <del> '\n expect=' + JSON.stringify(expected) + <del> '\n actual=' + JSON.stringify(actual); <add> const message = `path.${os}.parse(${JSON.stringify(test[0])})\n expect=${ <add> JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; <ide> const actualKeys = Object.keys(actual); <ide> const expectedKeys = Object.keys(expected); <ide> let failed = (actualKeys.length !== expectedKeys.length); <ide> trailingTests.forEach(function(test) { <ide> } <ide> } <ide> if (failed) <del> failures.push('\n' + message); <add> failures.push(`\n${message}`); <ide> }); <ide> }); <ide> assert.strictEqual(failures.length, 0, failures.join('')); <ide><path>test/parallel/test-path.js <ide> assert.strictEqual(path.win32.dirname('foo'), '.'); <ide> } <ide> const actual = extname(input); <ide> const expected = test[1]; <del> const fn = `path.${os}.extname(`; <del> const message = fn + JSON.stringify(input) + ')' + <del> '\n expect=' + JSON.stringify(expected) + <del> '\n actual=' + JSON.stringify(actual); <add> const message = `path.${os}.extname(${JSON.stringify(input)})\n expect=${ <add> JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; <ide> if (actual !== expected) <del> failures.push('\n' + message); <add> failures.push(`\n${message}`); <ide> }); <ide> }); <ide> assert.strictEqual(failures.length, 0, failures.join('')); <ide> joinTests.forEach((test) => { <ide> } else { <ide> os = 'posix'; <ide> } <del> const fn = `path.${os}.join(`; <del> const message = fn + test[0].map(JSON.stringify).join(',') + ')' + <del> '\n expect=' + JSON.stringify(expected) + <del> '\n actual=' + JSON.stringify(actual); <add> const message = <add> `path.${os}.join(${test[0].map(JSON.stringify).join(',')})\n expect=${ <add> JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; <ide> if (actual !== expected && actualAlt !== expected) <ide> failures.push(`\n${message}`); <ide> }); <ide> resolveTests.forEach((test) => { <ide> actualAlt = actual.replace(/\//g, '\\'); <ide> <ide> const expected = test[1]; <del> const fn = `path.${os}.resolve(`; <del> const message = fn + test[0].map(JSON.stringify).join(',') + ')' + <del> '\n expect=' + JSON.stringify(expected) + <del> '\n actual=' + JSON.stringify(actual); <add> const message = <add> `path.${os}.resolve(${test[0].map(JSON.stringify).join(',')})\n expect=${ <add> JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; <ide> if (actual !== expected && actualAlt !== expected) <del> failures.push('\n' + message); <add> failures.push(`\n${message}`); <ide> }); <ide> }); <ide> assert.strictEqual(failures.length, 0, failures.join('')); <ide> relativeTests.forEach((test) => { <ide> const actual = relative(test[0], test[1]); <ide> const expected = test[2]; <ide> const os = relative === path.win32.relative ? 
'win32' : 'posix'; <del> const fn = `path.${os}.relative(`; <del> const message = fn + <del> test.slice(0, 2).map(JSON.stringify).join(',') + <del> ')' + <del> '\n expect=' + JSON.stringify(expected) + <del> '\n actual=' + JSON.stringify(actual); <add> const message = `path.${os}.relative(${ <add> test.slice(0, 2).map(JSON.stringify).join(',')})\n expect=${ <add> JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; <ide> if (actual !== expected) <ide> failures.push(`\n${message}`); <ide> }); <ide><path>test/parallel/test-preload.js <ide> const nodeBinary = process.argv[0]; <ide> const preloadOption = (preloads) => { <ide> let option = ''; <ide> preloads.forEach(function(preload, index) { <del> option += '-r ' + preload + ' '; <add> option += `-r ${preload} `; <ide> }); <ide> return option; <ide> }; <ide> const fixtureD = fixture('define-global.js'); <ide> const fixtureThrows = fixture('throws_error4.js'); <ide> <ide> // test preloading a single module works <del>childProcess.exec(nodeBinary + ' ' + preloadOption([fixtureA]) + ' ' + fixtureB, <add>childProcess.exec(`${nodeBinary} ${preloadOption([fixtureA])} ${fixtureB}`, <ide> function(err, stdout, stderr) { <ide> assert.ifError(err); <ide> assert.strictEqual(stdout, 'A\nB\n'); <ide> }); <ide> <ide> // test preloading multiple modules works <ide> childProcess.exec( <del> nodeBinary + ' ' + preloadOption([fixtureA, fixtureB]) + ' ' + fixtureC, <add> `${nodeBinary} ${preloadOption([fixtureA, fixtureB])} ${fixtureC}`, <ide> function(err, stdout, stderr) { <ide> assert.ifError(err); <ide> assert.strictEqual(stdout, 'A\nB\nC\n'); <ide> childProcess.exec( <ide> <ide> // test that preloading a throwing module aborts <ide> childProcess.exec( <del> nodeBinary + ' ' + preloadOption([fixtureA, fixtureThrows]) + ' ' + fixtureB, <add> `${nodeBinary} ${preloadOption([fixtureA, fixtureThrows])} ${fixtureB}`, <ide> function(err, stdout, stderr) { <ide> if (err) { <ide> assert.strictEqual(stdout, 'A\n'); <ide> childProcess.exec( <ide> <ide> // test that preload can be used with --eval <ide> childProcess.exec( <del> nodeBinary + ' ' + preloadOption([fixtureA]) + '-e "console.log(\'hello\');"', <add> `${nodeBinary} ${preloadOption([fixtureA])}-e "console.log('hello');"`, <ide> function(err, stdout, stderr) { <ide> assert.ifError(err); <ide> assert.strictEqual(stdout, 'A\nhello\n'); <ide> replProc.on('close', function(code) { <ide> // test that preload placement at other points in the cmdline <ide> // also test that duplicated preload only gets loaded once <ide> childProcess.exec( <del> nodeBinary + ' ' + preloadOption([fixtureA]) + <del> '-e "console.log(\'hello\');" ' + preloadOption([fixtureA, fixtureB]), <add> `${nodeBinary} ${preloadOption([fixtureA])}-e "console.log('hello');" ${ <add> preloadOption([fixtureA, fixtureB])}`, <ide> function(err, stdout, stderr) { <ide> assert.ifError(err); <ide> assert.strictEqual(stdout, 'A\nB\nhello\n'); <ide> childProcess.exec( <ide> <ide> // test that preload works with -i <ide> const interactive = childProcess.exec( <del> nodeBinary + ' ' + preloadOption([fixtureD]) + '-i', <add> `${nodeBinary} ${preloadOption([fixtureD])}-i`, <ide> common.mustCall(function(err, stdout, stderr) { <ide> assert.ifError(err); <ide> assert.strictEqual(stdout, "> 'test'\n> "); <ide> interactive.stdin.write('a\n'); <ide> interactive.stdin.write('process.exit()\n'); <ide> <ide> childProcess.exec( <del> `${nodeBinary} --require ${fixture('cluster-preload.js')} ` + <del> fixture('cluster-preload-test.js'), <add> `${nodeBinary} 
--require ${fixture('cluster-preload.js')} ${ <add> fixture('cluster-preload-test.js')}`, <ide> function(err, stdout, stderr) { <ide> assert.ifError(err); <ide> assert.ok(/worker terminated with code 43/.test(stdout)); <ide><path>test/parallel/test-process-exit-code.js <ide> function test(arg, exit) { <ide> const f = __filename; <ide> const option = { stdio: [ 0, 1, 'ignore' ] }; <ide> spawn(node, [f, arg], option).on('exit', function(code) { <del> assert.strictEqual(code, exit, 'wrong exit for ' + <del> arg + '\nexpected:' + exit + <del> ' but got:' + code); <add> assert.strictEqual( <add> code, exit, <add> `wrong exit for ${arg}\nexpected:${exit} but got:${code}`); <ide> console.log('ok - %s exited with %d', arg, exit); <ide> }); <ide> } <ide><path>test/parallel/test-process-raw-debug.js <ide> switch (process.argv[2]) { <ide> case undefined: <ide> return parent(); <ide> default: <del> throw new Error('wtf? ' + process.argv[2]); <add> throw new Error(`wtf? ${process.argv[2]}`); <ide> } <ide> <ide> function parent() { <ide> function parent() { <ide> child.stderr.setEncoding('utf8'); <ide> <ide> child.stderr.on('end', function() { <del> assert.strictEqual(output, 'I can still debug!' + os.EOL); <add> assert.strictEqual(output, `I can still debug!${os.EOL}`); <ide> console.log('ok - got expected message'); <ide> }); <ide> <ide><path>test/parallel/test-process-redirect-warnings-env.js <ide> const assert = require('assert'); <ide> <ide> common.refreshTmpDir(); <ide> <del>const warnmod = require.resolve(common.fixturesDir + '/warnings.js'); <add>const warnmod = require.resolve(`${common.fixturesDir}/warnings.js`); <ide> const warnpath = path.join(common.tmpDir, 'warnings.txt'); <ide> <ide> fork(warnmod, {env: {NODE_REDIRECT_WARNINGS: warnpath}}) <ide><path>test/parallel/test-process-redirect-warnings.js <ide> const assert = require('assert'); <ide> <ide> common.refreshTmpDir(); <ide> <del>const warnmod = require.resolve(common.fixturesDir + '/warnings.js'); <add>const warnmod = require.resolve(`${common.fixturesDir}/warnings.js`); <ide> const warnpath = path.join(common.tmpDir, 'warnings.txt'); <ide> <ide> fork(warnmod, {execArgv: [`--redirect-warnings=${warnpath}`]}) <ide><path>test/parallel/test-promises-unhandled-rejections.js <ide> const asyncTest = (function() { <ide> <ide> function fail(error) { <ide> const stack = currentTest ? 
<del> error.stack + '\nFrom previous event:\n' + currentTest.stack : <add> `${error.stack}\nFrom previous event:\n${currentTest.stack}` : <ide> error.stack; <ide> <ide> if (currentTest) <del> process.stderr.write('\'' + currentTest.description + '\' failed\n\n'); <add> process.stderr.write(`'${currentTest.description}' failed\n\n`); <ide> <ide> process.stderr.write(stack); <ide> process.exit(2); <ide><path>test/parallel/test-querystring-maxKeys-non-finite.js <ide> function createManyParams(count) { <ide> <ide> for (let i = 1; i < count; i++) { <ide> const n = i.toString(36); <del> str += '&' + n + '=' + n; <add> str += `&${n}=${n}`; <ide> } <ide> <ide> return str; <ide><path>test/parallel/test-readline-interface.js <ide> function isWarned(emitter) { <ide> assert.strictEqual(line, expectedLines[callCount]); <ide> callCount++; <ide> }); <del> fi.emit('data', expectedLines.join('\n') + '\n'); <add> fi.emit('data', `${expectedLines.join('\n')}\n`); <ide> assert.strictEqual(callCount, expectedLines.length); <ide> rli.close(); <ide> <ide> function isWarned(emitter) { <ide> callCount++; <ide> }); <ide> expectedLines.forEach(function(line) { <del> fi.emit('data', line + '\r'); <add> fi.emit('data', `${line}\r`); <ide> fi.emit('data', '\n'); <ide> }); <ide> assert.strictEqual(callCount, expectedLines.length); <ide> function isWarned(emitter) { <ide> assert.strictEqual(line, expectedLines[callCount]); <ide> callCount++; <ide> }); <del> fi.emit('data', expectedLines.join('\n') + '\n'); <add> fi.emit('data', `${expectedLines.join('\n')}\n`); <ide> assert.strictEqual(callCount, expectedLines.length); <ide> fi.emit('keypress', '.', { name: 'up' }); // 'bat' <ide> assert.strictEqual(rli.line, expectedLines[--callCount]); <ide> function isWarned(emitter) { <ide> assert.strictEqual(line, expectedLines[callCount]); <ide> callCount++; <ide> }); <del> fi.emit('data', expectedLines.join('\n') + '\n'); <add> fi.emit('data', `${expectedLines.join('\n')}\n`); <ide> assert.strictEqual(callCount, expectedLines.length); <ide> fi.emit('keypress', '.', { name: 'up' }); // 'bat' <ide> assert.strictEqual(rli.line, expectedLines[--callCount]); <ide><path>test/parallel/test-regress-GH-1531.js <ide> const https = require('https'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = https.createServer(options, function(req, res) { <ide><path>test/parallel/test-regress-GH-3739.js <ide> common.refreshTmpDir(); <ide> <ide> // Make a long path. 
<ide> for (let i = 0; i < 50; i++) { <del> dir = dir + '/1234567890'; <add> dir = `${dir}/1234567890`; <ide> try { <ide> fs.mkdirSync(dir, '0777'); <ide> } catch (e) { <ide><path>test/parallel/test-regress-GH-9819.js <ide> const scripts = [ <ide> <ide> scripts.forEach((script) => { <ide> const node = process.execPath; <del> const code = setup + ';' + script; <add> const code = `${setup};${script}`; <ide> execFile(node, [ '-e', code ], common.mustCall((err, stdout, stderr) => { <ide> assert(stderr.includes('Error: xyz'), 'digest crashes'); <ide> })); <ide><path>test/parallel/test-repl-autolibs.js <ide> function test1() { <ide> if (data.length) { <ide> <ide> // inspect output matches repl output <del> assert.strictEqual(data, util.inspect(require('fs'), null, 2, false) + <del> '\n'); <add> assert.strictEqual(data, <add> `${util.inspect(require('fs'), null, 2, false)}\n`); <ide> // globally added lib matches required lib <ide> assert.strictEqual(global.fs, require('fs')); <ide> test2(); <ide><path>test/parallel/test-repl-definecommand.js <ide> r.defineCommand('say1', { <ide> help: 'help for say1', <ide> action: function(thing) { <ide> output = ''; <del> this.write('hello ' + thing); <add> this.write(`hello ${thing}`); <ide> this.displayPrompt(); <ide> } <ide> }); <ide><path>test/parallel/test-repl-envvars.js <ide> function run(test) { <ide> // The REPL registers 'module' and 'require' globals <ide> common.allowGlobals(repl.context.module, repl.context.require); <ide> <del> assert.strictEqual(expected.terminal, repl.terminal, 'Expected ' + <del> inspect(expected) + ' with ' + inspect(env)); <del> assert.strictEqual(expected.useColors, repl.useColors, 'Expected ' + <del> inspect(expected) + ' with ' + inspect(env)); <add> assert.strictEqual(expected.terminal, repl.terminal, <add> `Expected ${inspect(expected)} with ${inspect(env)}`); <add> assert.strictEqual(expected.useColors, repl.useColors, <add> `Expected ${inspect(expected)} with ${inspect(env)}`); <ide> repl.close(); <ide> }); <ide> } <ide><path>test/parallel/test-repl-mode.js <ide> tests.forEach(function(test) { <ide> function testSloppyMode() { <ide> const cli = initRepl(repl.REPL_MODE_SLOPPY); <ide> <del> cli.input.emit('data', ` <del> x = 3 <del> `.trim() + '\n'); <add> cli.input.emit('data', 'x = 3\n'); <ide> assert.strictEqual(cli.output.accumulator.join(''), '> 3\n> '); <ide> cli.output.accumulator.length = 0; <ide> <del> cli.input.emit('data', ` <del> let y = 3 <del> `.trim() + '\n'); <add> cli.input.emit('data', 'let y = 3\n'); <ide> assert.strictEqual(cli.output.accumulator.join(''), 'undefined\n> '); <ide> } <ide> <ide> function testStrictMode() { <ide> const cli = initRepl(repl.REPL_MODE_STRICT); <ide> <del> cli.input.emit('data', ` <del> x = 3 <del> `.trim() + '\n'); <add> cli.input.emit('data', 'x = 3\n'); <ide> assert.ok(/ReferenceError: x is not defined/.test( <ide> cli.output.accumulator.join(''))); <ide> cli.output.accumulator.length = 0; <ide> <del> cli.input.emit('data', ` <del> let y = 3 <del> `.trim() + '\n'); <add> cli.input.emit('data', 'let y = 3\n'); <ide> assert.strictEqual(cli.output.accumulator.join(''), 'undefined\n> '); <ide> } <ide> <ide> function testAutoMode() { <ide> const cli = initRepl(repl.REPL_MODE_MAGIC); <ide> <del> cli.input.emit('data', ` <del> x = 3 <del> `.trim() + '\n'); <add> cli.input.emit('data', 'x = 3\n'); <ide> assert.strictEqual(cli.output.accumulator.join(''), '> 3\n> '); <ide> cli.output.accumulator.length = 0; <ide> <del> cli.input.emit('data', ` <del> let y = 3 <del> `.trim() + 
'\n'); <add> cli.input.emit('data', 'let y = 3\n'); <ide> assert.strictEqual(cli.output.accumulator.join(''), 'undefined\n> '); <ide> } <ide> <ide><path>test/parallel/test-repl-persistent-history.js <ide> class ActionStream extends stream.Stream { <ide> if (typeof action === 'object') { <ide> this.emit('keypress', '', action); <ide> } else { <del> this.emit('data', action + '\n'); <add> this.emit('data', `${action}\n`); <ide> } <ide> setImmediate(doAction); <ide> }; <ide> const tests = [ <ide> { <ide> env: { NODE_REPL_HISTORY: historyPath }, <ide> test: [UP, CLEAR], <del> expected: [prompt, prompt + '\'you look fabulous today\'', prompt] <add> expected: [prompt, `${prompt}'you look fabulous today'`, prompt] <ide> }, <ide> { <ide> env: { NODE_REPL_HISTORY: historyPath, <ide> NODE_REPL_HISTORY_FILE: oldHistoryPath }, <ide> test: [UP, CLEAR], <del> expected: [prompt, prompt + '\'you look fabulous today\'', prompt] <add> expected: [prompt, `${prompt}'you look fabulous today'`, prompt] <ide> }, <ide> { <ide> env: { NODE_REPL_HISTORY: historyPath, <ide> NODE_REPL_HISTORY_FILE: '' }, <ide> test: [UP, CLEAR], <del> expected: [prompt, prompt + '\'you look fabulous today\'', prompt] <add> expected: [prompt, `${prompt}'you look fabulous today'`, prompt] <ide> }, <ide> { <ide> env: {}, <ide> const tests = [ <ide> { <ide> env: { NODE_REPL_HISTORY_FILE: oldHistoryPath }, <ide> test: [UP, CLEAR, '\'42\'', ENTER], <del> expected: [prompt, convertMsg, prompt, prompt + '\'=^.^=\'', prompt, '\'', <add> expected: [prompt, convertMsg, prompt, `${prompt}'=^.^='`, prompt, '\'', <ide> '4', '2', '\'', '\'42\'\n', prompt, prompt], <ide> clean: false <ide> }, <ide> { // Requires the above testcase <ide> env: {}, <ide> test: [UP, UP, ENTER], <del> expected: [prompt, prompt + '\'42\'', prompt + '\'=^.^=\'', '\'=^.^=\'\n', <add> expected: [prompt, `${prompt}'42'`, `${prompt}'=^.^='`, '\'=^.^=\'\n', <ide> prompt] <ide> }, <ide> { <ide> env: { NODE_REPL_HISTORY: historyPath, <ide> NODE_REPL_HISTORY_SIZE: 1 }, <ide> test: [UP, UP, CLEAR], <del> expected: [prompt, prompt + '\'you look fabulous today\'', prompt] <add> expected: [prompt, `${prompt}'you look fabulous today'`, prompt] <ide> }, <ide> { <ide> env: { NODE_REPL_HISTORY_FILE: oldHistoryPath, <ide> NODE_REPL_HISTORY_SIZE: 1 }, <ide> test: [UP, UP, UP, CLEAR], <del> expected: [prompt, convertMsg, prompt, prompt + '\'=^.^=\'', prompt] <add> expected: [prompt, convertMsg, prompt, `${prompt}'=^.^='`, prompt] <ide> }, <ide> { <ide> env: { NODE_REPL_HISTORY: historyPathFail, <ide><path>test/parallel/test-repl-save-load.js <ide> const saveFileName = join(common.tmpDir, 'test.save.js'); <ide> putIn.run(testFile); <ide> <ide> // save it to a file <del>putIn.run(['.save ' + saveFileName]); <add>putIn.run([`.save ${saveFileName}`]); <ide> <ide> // the file should have what I wrote <del>assert.strictEqual(fs.readFileSync(saveFileName, 'utf8'), testFile.join('\n') + <del> '\n'); <add>assert.strictEqual(fs.readFileSync(saveFileName, 'utf8'), <add> `${testFile.join('\n')}\n`); <ide> <ide> { <ide> // save .editor mode code <ide> testMe.complete('inner.o', function(error, data) { <ide> putIn.run(['.clear']); <ide> <ide> // Load the file back in <del>putIn.run(['.load ' + saveFileName]); <add>putIn.run([`.load ${saveFileName}`]); <ide> <ide> // make sure that the REPL data is "correct" <ide> testMe.complete('inner.o', function(error, data) { <ide> let loadFile = join(common.tmpDir, 'file.does.not.exist'); <ide> // should not break <ide> putIn.write = function(data) { <ide> // make 
sure I get a failed to load message and not some crazy error <del> assert.strictEqual(data, 'Failed to load:' + loadFile + '\n'); <add> assert.strictEqual(data, `Failed to load:${loadFile}\n`); <ide> // eat me to avoid work <ide> putIn.write = common.noop; <ide> }; <del>putIn.run(['.load ' + loadFile]); <add>putIn.run([`.load ${loadFile}`]); <ide> <ide> // throw error on loading directory <ide> loadFile = common.tmpDir; <ide> putIn.write = function(data) { <del> assert.strictEqual(data, 'Failed to load:' + loadFile + <del> ' is not a valid file\n'); <add> assert.strictEqual(data, `Failed to load:${loadFile} is not a valid file\n`); <ide> putIn.write = common.noop; <ide> }; <del>putIn.run(['.load ' + loadFile]); <add>putIn.run([`.load ${loadFile}`]); <ide> <ide> // clear the REPL <ide> putIn.run(['.clear']); <ide> const invalidFileName = join(common.tmpDir, '\0\0\0\0\0'); <ide> // should not break <ide> putIn.write = function(data) { <ide> // make sure I get a failed to save message and not some other error <del> assert.strictEqual(data, 'Failed to save:' + invalidFileName + '\n'); <add> assert.strictEqual(data, `Failed to save:${invalidFileName}\n`); <ide> // reset to no-op <ide> putIn.write = common.noop; <ide> }; <ide> <ide> // save it to a file <del>putIn.run(['.save ' + invalidFileName]); <add>putIn.run([`.save ${invalidFileName}`]); <ide><path>test/parallel/test-repl-tab-complete.js <ide> putIn.run(['.clear']); <ide> testMe.complete('require(\'', common.mustCall(function(error, data) { <ide> assert.strictEqual(error, null); <ide> repl._builtinLibs.forEach(function(lib) { <del> assert.notStrictEqual(data[0].indexOf(lib), -1, lib + ' not found'); <add> assert.notStrictEqual(data[0].indexOf(lib), -1, `${lib} not found`); <ide> }); <ide> })); <ide> <ide><path>test/parallel/test-repl.js <ide> console.error('repl test'); <ide> <ide> // function for REPL to run <ide> global.invoke_me = function(arg) { <del> return 'invoked ' + arg; <add> return `invoked ${arg}`; <ide> }; <ide> <ide> function send_expect(list) { <ide> if (list.length > 0) { <ide> const cur = list.shift(); <ide> <del> console.error('sending ' + JSON.stringify(cur.send)); <add> console.error(`sending ${JSON.stringify(cur.send)}`); <ide> <ide> cur.client.expect = cur.expect; <ide> cur.client.list = list; <ide> if (cur.send.length > 0) { <del> cur.client.write(cur.send + '\n'); <add> cur.client.write(`${cur.send}\n`); <ide> } <ide> } <ide> } <ide> function error_test() { <ide> <ide> client_unix.on('data', function(data) { <ide> read_buffer += data.toString('ascii', 0, data.length); <del> console.error('Unix data: ' + JSON.stringify(read_buffer) + ', expecting ' + <del> (client_unix.expect.exec ? <del> client_unix.expect : <del> JSON.stringify(client_unix.expect))); <add> console.error( <add> `Unix data: ${JSON.stringify(read_buffer)}, expecting ${ <add> client_unix.expect.exec ? 
<add> client_unix.expect : <add> JSON.stringify(client_unix.expect)}`); <ide> <ide> if (read_buffer.includes(prompt_unix)) { <ide> // if it's an exact match, then don't do the regexp <ide> function error_test() { <ide> { client: client_unix, send: 'function blah() { return 1; }', <ide> expect: prompt_unix }, <ide> { client: client_unix, send: 'blah()', <del> expect: '1\n' + prompt_unix }, <add> expect: `1\n${prompt_unix}` }, <ide> // Functions should not evaluate twice (#2773) <ide> { client: client_unix, send: 'var I = [1,2,3,function() {}]; I.pop()', <ide> expect: '[Function]' }, <ide> function error_test() { <ide> { client: client_unix, send: '2)', <ide> expect: prompt_multiline }, <ide> { client: client_unix, send: ')', <del> expect: 'undefined\n' + prompt_unix }, <add> expect: `undefined\n${prompt_unix}` }, <ide> // npm prompt error message <ide> { client: client_unix, send: 'npm install foobar', <ide> expect: expect_npm }, <ide> function error_test() { <ide> // this makes sure that we don't print `undefined` when we actually print <ide> // the error message <ide> { client: client_unix, send: '.invalid_repl_command', <del> expect: 'Invalid REPL keyword\n' + prompt_unix }, <add> expect: `Invalid REPL keyword\n${prompt_unix}` }, <ide> // this makes sure that we don't crash when we use an inherited property as <ide> // a REPL command <ide> { client: client_unix, send: '.toString', <del> expect: 'Invalid REPL keyword\n' + prompt_unix }, <add> expect: `Invalid REPL keyword\n${prompt_unix}` }, <ide> // fail when we are not inside a String and a line continuation is used <ide> { client: client_unix, send: '[] \\', <ide> expect: /\bSyntaxError: Invalid or unexpected token/ }, <ide> // do not fail when a String is created with line continuation <ide> { client: client_unix, send: '\'the\\\nfourth\\\neye\'', <del> expect: prompt_multiline + prompt_multiline + <del> '\'thefourtheye\'\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}'thefourtheye'\n${ <add> prompt_unix}` }, <ide> // Don't fail when a partial String is created and line continuation is used <ide> // with whitespace characters at the end of the string. We are to ignore it. 
<ide> // This test is to make sure that we properly remove the whitespace <ide> function error_test() { <ide> expect: prompt_unix }, <ide> // multiline strings preserve whitespace characters in them <ide> { client: client_unix, send: '\'the \\\n fourth\t\t\\\n eye \'', <del> expect: prompt_multiline + prompt_multiline + <del> '\'the fourth\\t\\t eye \'\n' + prompt_unix }, <add> expect: `${prompt_multiline}${ <add> prompt_multiline}'the fourth\\t\\t eye '\n${prompt_unix}` }, <ide> // more than one multiline strings also should preserve whitespace chars <ide> { client: client_unix, send: '\'the \\\n fourth\' + \'\t\t\\\n eye \'', <del> expect: prompt_multiline + prompt_multiline + <del> '\'the fourth\\t\\t eye \'\n' + prompt_unix }, <add> expect: `${prompt_multiline}${ <add> prompt_multiline}'the fourth\\t\\t eye '\n${prompt_unix}` }, <ide> // using REPL commands within a string literal should still work <ide> { client: client_unix, send: '\'\\\n.break', <ide> expect: prompt_unix }, <ide> function error_test() { <ide> expect: prompt_unix + prompt_unix + prompt_unix }, <ide> // empty lines in the string literals should not affect the string <ide> { client: client_unix, send: '\'the\\\n\\\nfourtheye\'\n', <del> expect: prompt_multiline + prompt_multiline + <del> '\'thefourtheye\'\n' + prompt_unix }, <add> expect: `${prompt_multiline}${ <add> prompt_multiline}'thefourtheye'\n${prompt_unix}` }, <ide> // Regression test for https://github.com/nodejs/node/issues/597 <ide> { client: client_unix, <ide> send: '/(.)(.)(.)(.)(.)(.)(.)(.)(.)/.test(\'123456789\')\n', <ide> function error_test() { <ide> '\'7\'\n', '\'8\'\n', '\'9\'\n'].join(`${prompt_unix}`) }, <ide> // regression tests for https://github.com/nodejs/node/issues/2749 <ide> { client: client_unix, send: 'function x() {\nreturn \'\\n\';\n }', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}undefined\n${ <add> prompt_unix}` }, <ide> { client: client_unix, send: 'function x() {\nreturn \'\\\\\';\n }', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}undefined\n${ <add> prompt_unix}` }, <ide> // regression tests for https://github.com/nodejs/node/issues/3421 <ide> { client: client_unix, send: 'function x() {\n//\'\n }', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}undefined\n${ <add> prompt_unix}` }, <ide> { client: client_unix, send: 'function x() {\n//"\n }', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}undefined\n${ <add> prompt_unix}` }, <ide> { client: client_unix, send: 'function x() {//\'\n }', <del> expect: prompt_multiline + 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: 'function x() {//"\n }', <del> expect: prompt_multiline + 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: 'function x() {\nvar i = "\'";\n }', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}undefined\n${ <add> prompt_unix}` }, <ide> { client: client_unix, send: 'function x(/*optional*/) {}', <del> expect: 'undefined\n' + prompt_unix }, <add> 
expect: `undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: 'function x(/* // 5 */) {}', <del> expect: 'undefined\n' + prompt_unix }, <add> expect: `undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: '// /* 5 */', <del> expect: 'undefined\n' + prompt_unix }, <add> expect: `undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: '"//"', <del> expect: '\'//\'\n' + prompt_unix }, <add> expect: `'//'\n${prompt_unix}` }, <ide> { client: client_unix, send: '"data /*with*/ comment"', <del> expect: '\'data /*with*/ comment\'\n' + prompt_unix }, <add> expect: `'data /*with*/ comment'\n${prompt_unix}` }, <ide> { client: client_unix, send: 'function x(/*fn\'s optional params*/) {}', <del> expect: 'undefined\n' + prompt_unix }, <add> expect: `undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: '/* \'\n"\n\'"\'\n*/', <del> expect: 'undefined\n' + prompt_unix }, <add> expect: `undefined\n${prompt_unix}` }, <ide> // REPL should get a normal require() function, not one that allows <ide> // access to internal modules without the --expose_internals flag. <ide> { client: client_unix, send: 'require("internal/repl")', <ide> expect: /^Error: Cannot find module 'internal\/repl'/ }, <ide> // REPL should handle quotes within regexp literal in multiline mode <ide> { client: client_unix, <ide> send: "function x(s) {\nreturn s.replace(/'/,'');\n}", <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}` + <add> `undefined\n${prompt_unix}` }, <ide> { client: client_unix, <ide> send: "function x(s) {\nreturn s.replace(/'/,'');\n}", <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}` + <add> `undefined\n${prompt_unix}` }, <ide> { client: client_unix, <ide> send: 'function x(s) {\nreturn s.replace(/"/,"");\n}', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}` + <add> `undefined\n${prompt_unix}` }, <ide> { client: client_unix, <ide> send: 'function x(s) {\nreturn s.replace(/.*/,"");\n}', <del> expect: prompt_multiline + prompt_multiline + <del> 'undefined\n' + prompt_unix }, <add> expect: `${prompt_multiline}${prompt_multiline}` + <add> `undefined\n${prompt_unix}` }, <ide> { client: client_unix, send: '{ var x = 4; }', <del> expect: 'undefined\n' + prompt_unix }, <add> expect: `undefined\n${prompt_unix}` }, <ide> // Illegal token is not recoverable outside string literal, RegExp literal, <ide> // or block comment. 
https://github.com/nodejs/node/issues/3611 <ide> { client: client_unix, send: 'a = 3.5e', <ide> expect: /\bSyntaxError: Invalid or unexpected token/ }, <ide> // Mitigate https://github.com/nodejs/node/issues/548 <ide> { client: client_unix, send: 'function name(){ return "node"; };name()', <del> expect: "'node'\n" + prompt_unix }, <add> expect: `'node'\n${prompt_unix}` }, <ide> { client: client_unix, send: 'function name(){ return "nodejs"; };name()', <del> expect: "'nodejs'\n" + prompt_unix }, <add> expect: `'nodejs'\n${prompt_unix}` }, <ide> // Avoid emitting repl:line-number for SyntaxError <ide> { client: client_unix, send: 'a = 3.5e', <ide> expect: /^(?!repl)/ }, <ide> function error_test() { <ide> <ide> { <ide> client: client_unix, send: '(function() {\nreturn /foo/ / /bar/;\n}())', <del> expect: prompt_multiline + prompt_multiline + 'NaN\n' + prompt_unix <add> expect: `${prompt_multiline}${prompt_multiline}NaN\n${prompt_unix}` <ide> }, <ide> <ide> { <ide> client: client_unix, send: '(function() {\nif (false) {} /bar"/;\n}())', <del> expect: prompt_multiline + prompt_multiline + 'undefined\n' + prompt_unix <add> expect: `${prompt_multiline}${prompt_multiline}undefined\n${prompt_unix}` <ide> }, <ide> <ide> // Newline within template string maintains whitespace. <ide> { client: client_unix, send: '`foo \n`', <del> expect: prompt_multiline + '\'foo \\n\'\n' + prompt_unix }, <add> expect: `${prompt_multiline}'foo \\n'\n${prompt_unix}` }, <ide> // Whitespace is not evaluated. <ide> { client: client_unix, send: ' \t \n', <ide> expect: prompt_unix } <ide> function tcp_test() { <ide> { client: client_tcp, send: 'a += 1', <ide> expect: (`12346\n${prompt_tcp}`) }, <ide> { client: client_tcp, <del> send: 'require(' + JSON.stringify(moduleFilename) + ').number', <add> send: `require(${JSON.stringify(moduleFilename)}).number`, <ide> expect: (`42\n${prompt_tcp}`) } <ide> ]); <ide> }); <ide> <ide> client_tcp.on('data', function(data) { <ide> read_buffer += data.toString('ascii', 0, data.length); <del> console.error('TCP data: ' + JSON.stringify(read_buffer) + <del> ', expecting ' + JSON.stringify(client_tcp.expect)); <add> console.error(`TCP data: ${JSON.stringify(read_buffer)}, expecting ${ <add> JSON.stringify(client_tcp.expect)}`); <ide> if (read_buffer.includes(prompt_tcp)) { <ide> assert.strictEqual(client_tcp.expect, read_buffer); <ide> console.error('match'); <ide> function unix_test() { <ide> <ide> client_unix.on('data', function(data) { <ide> read_buffer += data.toString('ascii', 0, data.length); <del> console.error('Unix data: ' + JSON.stringify(read_buffer) + <del> ', expecting ' + JSON.stringify(client_unix.expect)); <add> console.error(`Unix data: ${JSON.stringify(read_buffer)}, expecting ${ <add> JSON.stringify(client_unix.expect)}`); <ide> if (read_buffer.includes(prompt_unix)) { <ide> assert.strictEqual(client_unix.expect, read_buffer); <ide> console.error('match'); <ide><path>test/parallel/test-require-dot.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const m = require('module'); <ide> <del>const a = require(common.fixturesDir + '/module-require/relative/dot.js'); <del>const b = require(common.fixturesDir + '/module-require/relative/dot-slash.js'); <add>const a = require(`${common.fixturesDir}/module-require/relative/dot.js`); <add>const b = require(`${common.fixturesDir}/module-require/relative/dot-slash.js`); <ide> <ide> assert.strictEqual(a.value, 42); <ide> assert.strictEqual(a, b, 'require(".") should resolve like require("./")'); 
<ide> <del>process.env.NODE_PATH = common.fixturesDir + '/module-require/relative'; <add>process.env.NODE_PATH = `${common.fixturesDir}/module-require/relative`; <ide> m._initPaths(); <ide> <ide> const c = require('.'); <ide><path>test/parallel/test-require-exceptions.js <ide> const assert = require('assert'); <ide> <ide> // A module with an error in it should throw <ide> assert.throws(function() { <del> require(common.fixturesDir + '/throws_error'); <add> require(`${common.fixturesDir}/throws_error`); <ide> }, /^Error: blah$/); <ide> <ide> // Requiring the same module again should throw as well <ide> assert.throws(function() { <del> require(common.fixturesDir + '/throws_error'); <add> require(`${common.fixturesDir}/throws_error`); <ide> }, /^Error: blah$/); <ide> <ide> // Requiring a module that does not exist should throw an <ide><path>test/parallel/test-signal-handler.js <ide> if (common.isWindows) { <ide> return; <ide> } <ide> <del>console.log('process.pid: ' + process.pid); <add>console.log(`process.pid: ${process.pid}`); <ide> <ide> process.on('SIGUSR1', common.mustCall()); <ide> <ide> process.on('SIGUSR1', common.mustCall(function() { <ide> <ide> let i = 0; <ide> setInterval(function() { <del> console.log('running process...' + ++i); <add> console.log(`running process...${++i}`); <ide> <ide> if (i === 5) { <ide> process.kill(process.pid, 'SIGUSR1'); <ide><path>test/parallel/test-signal-unregister.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const spawn = require('child_process').spawn; <ide> <del>const child = spawn(process.argv[0], [common.fixturesDir + '/should_exit.js']); <add>const child = spawn(process.argv[0], [`${common.fixturesDir}/should_exit.js`]); <ide> child.stdout.once('data', function() { <ide> child.kill('SIGINT'); <ide> }); <ide><path>test/parallel/test-socket-write-after-fin-error.js <ide> let gotServerError = false; <ide> const server = net.createServer(function(sock) { <ide> sock.setEncoding('utf8'); <ide> sock.on('error', function(er) { <del> console.error(er.code + ': ' + er.message); <add> console.error(`${er.code}: ${er.message}`); <ide> gotServerError = er; <ide> }); <ide> <ide><path>test/parallel/test-spawn-cmd-named-pipe.js <ide> if (!process.argv[2]) { <ide> const spawn = require('child_process').spawn; <ide> const path = require('path'); <ide> <del> const pipeNamePrefix = path.basename(__filename) + '.' 
+ process.pid; <del> const stdinPipeName = '\\\\.\\pipe\\' + pipeNamePrefix + '.stdin'; <del> const stdoutPipeName = '\\\\.\\pipe\\' + pipeNamePrefix + '.stdout'; <add> const pipeNamePrefix = `${path.basename(__filename)}.${process.pid}`; <add> const stdinPipeName = `\\\\.\\pipe\\${pipeNamePrefix}.stdin`; <add> const stdoutPipeName = `\\\\.\\pipe\\${pipeNamePrefix}.stdout`; <ide> <ide> const stdinPipeServer = net.createServer(function(c) { <ide> c.on('end', common.mustCall(function() { <ide><path>test/parallel/test-stdin-from-file.js <ide> const fs = require('fs'); <ide> const stdoutScript = join(common.fixturesDir, 'echo-close-check.js'); <ide> const tmpFile = join(common.tmpDir, 'stdin.txt'); <ide> <del>const cmd = '"' + process.argv[0] + '" "' + stdoutScript + '" < "' + <del> tmpFile + '"'; <add>const cmd = `"${process.argv[0]}" "${stdoutScript}" < "${tmpFile}"`; <ide> <ide> const string = 'abc\nümlaut.\nsomething else\n' + <ide> '南越国是前203年至前111年存在于岭南地区的一个国家,国都位于番禺,' + <ide> const string = 'abc\nümlaut.\nsomething else\n' + <ide> <ide> common.refreshTmpDir(); <ide> <del>console.log(cmd + '\n\n'); <add>console.log(`${cmd}\n\n`); <ide> <ide> fs.writeFileSync(tmpFile, string); <ide> <ide> childProcess.exec(cmd, common.mustCall(function(err, stdout, stderr) { <ide> fs.unlinkSync(tmpFile); <ide> <ide> assert.ifError(err); <del> assert.strictEqual(stdout, 'hello world\r\n' + string); <add> assert.strictEqual(stdout, `hello world\r\n${string}`); <ide> assert.strictEqual('', stderr); <ide> })); <ide><path>test/parallel/test-stdin-script-child.js <ide> const child = spawn(process.execPath, [], { <ide> NODE_DEBUG: process.argv[2] <ide> }) <ide> }); <del>const wanted = child.pid + '\n'; <add>const wanted = `${child.pid}\n`; <ide> let found = ''; <ide> <ide> child.stdout.setEncoding('utf8'); <ide> child.stdout.on('data', function(c) { <ide> <ide> child.stderr.setEncoding('utf8'); <ide> child.stderr.on('data', function(c) { <del> console.error('> ' + c.trim().split(/\n/).join('\n> ')); <add> console.error(`> ${c.trim().split(/\n/).join('\n> ')}`); <ide> }); <ide> <ide> child.on('close', common.mustCall(function(c) { <ide><path>test/parallel/test-stdout-stderr-reading.js <ide> function parent() { <ide> }); <ide> c1.stderr.setEncoding('utf8'); <ide> c1.stderr.on('data', function(chunk) { <del> console.error('c1err: ' + chunk.split('\n').join('\nc1err: ')); <add> console.error(`c1err: ${chunk.split('\n').join('\nc1err: ')}`); <ide> }); <ide> c1.on('close', common.mustCall(function(code, signal) { <ide> assert(!code); <ide> function parent() { <ide> }); <ide> c1.stderr.setEncoding('utf8'); <ide> c1.stderr.on('data', function(chunk) { <del> console.error('c1err: ' + chunk.split('\n').join('\nc1err: ')); <add> console.error(`c1err: ${chunk.split('\n').join('\nc1err: ')}`); <ide> }); <ide> c2.on('close', common.mustCall(function(code, signal) { <ide> assert(!code); <ide><path>test/parallel/test-stdout-to-file.js <ide> const tmpFile = path.join(common.tmpDir, 'stdout.txt'); <ide> common.refreshTmpDir(); <ide> <ide> function test(size, useBuffer, cb) { <del> const cmd = '"' + process.argv[0] + '"' + <del> ' ' + <del> '"' + (useBuffer ? scriptBuffer : scriptString) + '"' + <del> ' ' + <del> size + <del> ' > ' + <del> '"' + tmpFile + '"'; <add> const cmd = `"${process.argv[0]}" "${ <add> useBuffer ? 
scriptBuffer : scriptString}" ${size} > "${tmpFile}"`; <ide> <ide> try { <ide> fs.unlinkSync(tmpFile); <ide><path>test/parallel/test-stream-base-no-abort.js <ide> async_wrap.enable(); <ide> <ide> const checkTLS = common.mustCall(function checkTLS() { <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/ec-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/ec-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/ec-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/ec-cert.pem`) <ide> }; <ide> const server = tls.createServer(options, common.noop) <ide> .listen(0, function() { <ide><path>test/parallel/test-stream-push-strings.js <ide> const results = []; <ide> ms.on('readable', function() { <ide> let chunk; <ide> while (null !== (chunk = ms.read())) <del> results.push(chunk + ''); <add> results.push(String(chunk)); <ide> }); <ide> <ide> const expect = [ 'first chunksecond to last chunk', 'last chunk' ]; <ide><path>test/parallel/test-stream-readable-flow-recursion.js <ide> function flow(stream, size, callback) { <ide> callback(chunk); <ide> <ide> depth -= 1; <del> console.log('flow(' + depth + '): exit'); <add> console.log(`flow(${depth}): exit`); <ide> } <ide> <ide> flow(stream, 5000, function() { <del> console.log('complete (' + depth + ')'); <add> console.log(`complete (${depth})`); <ide> }); <ide> <ide> process.on('exit', function(code) { <ide><path>test/parallel/test-stream2-base64-single-char-read-end.js <ide> dst._write = function(chunk, enc, cb) { <ide> }; <ide> <ide> src.on('end', function() { <del> assert.strictEqual(Buffer.concat(accum) + '', 'MQ=='); <add> assert.strictEqual(String(Buffer.concat(accum)), 'MQ=='); <ide> clearTimeout(timeout); <ide> }); <ide> <ide><path>test/parallel/test-stream2-readable-empty-buffer-no-eof.js <ide> function test1() { <ide> function flow() { <ide> let chunk; <ide> while (null !== (chunk = r.read())) <del> results.push(chunk + ''); <add> results.push(String(chunk)); <ide> } <ide> r.on('readable', flow); <ide> r.on('end', function() { <ide> function test2() { <ide> function flow() { <ide> let chunk; <ide> while (null !== (chunk = r.read())) <del> results.push(chunk + ''); <add> results.push(String(chunk)); <ide> } <ide> r.on('readable', flow); <ide> r.on('end', function() { <ide><path>test/parallel/test-stream2-transform.js <ide> test('passthrough event emission', function(t) { <ide> t.equal(emits, 1); <ide> <ide> t.equal(pt.read(5).toString(), 'foogb'); <del> t.equal(pt.read(5) + '', 'null'); <add> t.equal(String(pt.read(5)), 'null'); <ide> <ide> console.error('need emit 1'); <ide> <ide><path>test/parallel/test-stream2-writable.js <ide> test('write callbacks', function(t) { <ide> callbacks._called[i] = chunk; <ide> }]; <ide> }).reduce(function(set, x) { <del> set['callback-' + x[0]] = x[1]; <add> set[`callback-${x[0]}`] = x[1]; <ide> return set; <ide> }, {}); <ide> callbacks._called = []; <ide> test('write callbacks', function(t) { <ide> }); <ide> <ide> chunks.forEach(function(chunk, i) { <del> tw.write(chunk, callbacks['callback-' + i]); <add> tw.write(chunk, callbacks[`callback-${i}`]); <ide> }); <ide> tw.end(); <ide> }); <ide><path>test/parallel/test-string-decoder.js <ide> function test(encoding, input, expected, singleSequence) { <ide> function unicodeEscape(str) { <ide> let r = ''; <ide> for (let i = 0; i < str.length; i++) { <del> r += '\\u' + str.charCodeAt(i).toString(16); <add> r += `\\u${str.charCodeAt(i).toString(16)}`; <ide> } <ide> return r; <ide> } 
<ide><path>test/parallel/test-tcp-wrap-listen.js <ide> server.onconnection = (err, client) => { <ide> assert.strictEqual(returnCode, 0); <ide> client.pendingWrites.push(req); <ide> <del> console.log('client.writeQueueSize: ' + client.writeQueueSize); <add> console.log(`client.writeQueueSize: ${client.writeQueueSize}`); <ide> // 11 bytes should flush <ide> assert.strictEqual(0, client.writeQueueSize); <ide> <ide> server.onconnection = (err, client) => { <ide> assert.strictEqual(client, client_); <ide> assert.strictEqual(req, req_); <ide> <del> console.log('client.writeQueueSize: ' + client.writeQueueSize); <add> console.log(`client.writeQueueSize: ${client.writeQueueSize}`); <ide> assert.strictEqual(0, client.writeQueueSize); <ide> <ide> maybeCloseClient(); <ide><path>test/parallel/test-timers-ordering.js <ide> let last_ts = 0; <ide> function f(i) { <ide> if (i <= N) { <ide> // check order <del> assert.strictEqual(i, last_i + 1, 'order is broken: ' + i + ' != ' + <del> last_i + ' + 1'); <add> assert.strictEqual(i, last_i + 1, `order is broken: ${i} != ${last_i} + 1`); <ide> last_i = i; <ide> <ide> // check that this iteration is fired at least 1ms later than the previous <ide> const now = Timer.now(); <ide> console.log(i, now); <ide> assert(now >= last_ts + 1, <del> 'current ts ' + now + ' < prev ts ' + last_ts + ' + 1'); <add> `current ts ${now} < prev ts ${last_ts} + 1`); <ide> last_ts = now; <ide> <ide> // schedule next iteration <ide><path>test/parallel/test-tls-0-dns-altname.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/0-dns/0-dns-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/0-dns/0-dns-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/0-dns/0-dns-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/0-dns/0-dns-cert.pem`) <ide> }, function(c) { <ide> c.once('data', function() { <ide> c.destroy(); <ide><path>test/parallel/test-tls-alert-handling.js <ide> const net = require('net'); <ide> const fs = require('fs'); <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-alert.js <ide> const spawn = require('child_process').spawn; <ide> let success = false; <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-alpn-server-client.js <ide> const fs = require('fs'); <ide> const tls = require('tls'); <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-ca-concat.js <ide> const { <ide> connect({ <ide> client: { <ide> checkServerIdentity: (servername, cert) => { }, <del> ca: keys.agent1.cert + '\n' + keys.agent6.ca, <add> ca: `${keys.agent1.cert}\n${keys.agent6.ca}`, <ide> }, <ide> server: { <ide> cert: keys.agent6.cert, <ide><path>test/parallel/test-tls-client-destroy-soon.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + 
'/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> const big = Buffer.alloc(2 * 1024 * 1024, 'Y'); <ide><path>test/parallel/test-tls-client-getephemeralkeyinfo.js <ide> if (!common.hasCrypto) { <ide> const tls = require('tls'); <ide> <ide> const fs = require('fs'); <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`); <ide> <ide> let ntests = 0; <ide> let nsuccess = 0; <ide> <ide> function loadDHParam(n) { <ide> let path = common.fixturesDir; <ide> if (n !== 'error') path += '/keys'; <del> return fs.readFileSync(path + '/dh' + n + '.pem'); <add> return fs.readFileSync(`${path}/dh${n}.pem`); <ide> } <ide> <ide> const cipherlist = { <ide><path>test/parallel/test-tls-client-mindhsize.js <ide> if (!common.hasCrypto) { <ide> const tls = require('tls'); <ide> <ide> const fs = require('fs'); <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`); <ide> <ide> let nsuccess = 0; <ide> let nerror = 0; <ide> <ide> function loadDHParam(n) { <ide> let path = common.fixturesDir; <ide> if (n !== 'error') path += '/keys'; <del> return fs.readFileSync(path + '/dh' + n + '.pem'); <add> return fs.readFileSync(`${path}/dh${n}.pem`); <ide> } <ide> <ide> function test(size, err, next) { <ide><path>test/parallel/test-tls-client-resume.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> // create server <ide><path>test/parallel/test-tls-client-verify.js <ide> const testCases = <ide> ]; <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> <ide> function testServers(index, servers, clientOptions, cb) { <ide> const authorized = client.authorized || <ide> hosterr.test(client.authorizationError); <ide> <del> console.error('expected: ' + ok + ' authed: ' + authorized); <add> console.error(`expected: ${ok} authed: ${authorized}`); <ide> <ide> assert.strictEqual(ok, authorized); <ide> server.close(); <ide><path>test/parallel/test-tls-close-error.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }, function(c) { <ide> 
}).listen(0, common.mustCall(function() { <ide> const c = tls.connect(this.address().port, common.mustNotCall()); <ide><path>test/parallel/test-tls-close-notify.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }, function(c) { <ide> // Send close-notify without shutting down TCP socket <ide> if (c._handle.shutdownSSL() !== 1) <ide><path>test/parallel/test-tls-cnnic-whitelist.js <ide> const fs = require('fs'); <ide> const path = require('path'); <ide> <ide> function filenamePEM(n) { <del> return path.join(common.fixturesDir, 'keys', n + '.pem'); <add> return path.join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-connect-pipe.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> common.refreshTmpDir(); <ide><path>test/parallel/test-tls-connect-simple.js <ide> const fs = require('fs'); <ide> let serverConnected = 0; <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = tls.Server(options, common.mustCall(function(socket) { <ide><path>test/parallel/test-tls-connect-stream-writes.js <ide> const stream = require('stream'); <ide> const net = require('net'); <ide> <ide> const cert_dir = common.fixturesDir; <del>const options = { key: fs.readFileSync(cert_dir + '/test_key.pem'), <del> cert: fs.readFileSync(cert_dir + '/test_cert.pem'), <del> ca: [ fs.readFileSync(cert_dir + '/test_ca.pem') ], <add>const options = { key: fs.readFileSync(`${cert_dir}/test_key.pem`), <add> cert: fs.readFileSync(`${cert_dir}/test_cert.pem`), <add> ca: [ fs.readFileSync(`${cert_dir}/test_ca.pem`) ], <ide> ciphers: 'AES256-GCM-SHA384' }; <ide> const content = 'hello world'; <ide> const recv_bufs = []; <ide><path>test/parallel/test-tls-delayed-attach-error.js <ide> const net = require('net'); <ide> const bonkers = Buffer.alloc(1024, 42); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = net.createServer(common.mustCall(function(c) { <ide><path>test/parallel/test-tls-delayed-attach.js <ide> const sent = 'hello world'; <ide> let received = ''; <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: 
fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = net.createServer(function(c) { <ide><path>test/parallel/test-tls-dhe.js <ide> const tls = require('tls'); <ide> <ide> const spawn = require('child_process').spawn; <ide> const fs = require('fs'); <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`); <ide> let nsuccess = 0; <ide> let ntests = 0; <ide> const ciphers = 'DHE-RSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256'; <ide> common.expectWarning('SecurityWarning', <ide> function loadDHParam(n) { <ide> let path = common.fixturesDir; <ide> if (n !== 'error') path += '/keys'; <del> return fs.readFileSync(path + '/dh' + n + '.pem'); <add> return fs.readFileSync(`${path}/dh${n}.pem`); <ide> } <ide> <ide> function test(keylen, expectedCipher, cb) { <ide> function test(keylen, expectedCipher, cb) { <ide> client.stdout.on('end', function() { <ide> // DHE key length can be checked -brief option in s_client but it <ide> // is only supported in openssl 1.0.2 so we cannot check it. <del> const reg = new RegExp('Cipher : ' + expectedCipher); <add> const reg = new RegExp(`Cipher : ${expectedCipher}`); <ide> if (reg.test(out)) { <ide> nsuccess++; <ide> server.close(); <ide><path>test/parallel/test-tls-ecdh-disable.js <ide> const exec = require('child_process').exec; <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`), <ide> ciphers: 'ECDHE-RSA-RC4-SHA', <ide> ecdhCurve: false <ide> }; <ide> <ide> const server = tls.createServer(options, common.mustNotCall()); <ide> <ide> server.listen(0, '127.0.0.1', common.mustCall(function() { <del> let cmd = '"' + common.opensslCli + '" s_client -cipher ' + options.ciphers + <del> ` -connect 127.0.0.1:${this.address().port}`; <add> let cmd = `"${common.opensslCli}" s_client -cipher ${ <add> options.ciphers} -connect 127.0.0.1:${this.address().port}`; <ide> <ide> // for the performance and stability issue in s_client on Windows <ide> if (common.isWindows) <ide><path>test/parallel/test-tls-ecdh.js <ide> const exec = require('child_process').exec; <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`), <ide> ciphers: '-ALL:ECDHE-RSA-AES128-SHA256', <ide> ecdhCurve: 'prime256v1' <ide> }; <ide> const server = tls.createServer(options, common.mustCall(function(conn) { <ide> })); <ide> <ide> server.listen(0, '127.0.0.1', common.mustCall(function() { <del> let cmd = '"' + common.opensslCli + '" s_client -cipher ' + options.ciphers + <del> ` -connect 127.0.0.1:${this.address().port}`; <add> let cmd = `"${common.opensslCli}" s_client -cipher ${ <add> options.ciphers} -connect 
127.0.0.1:${this.address().port}`; <ide> <ide> // for the performance and stability issue in s_client on Windows <ide> if (common.isWindows) <ide><path>test/parallel/test-tls-env-bad-extra-ca.js <ide> if (process.env.CHILD) { <ide> <ide> const env = { <ide> CHILD: 'yes', <del> NODE_EXTRA_CA_CERTS: common.fixturesDir + '/no-such-file-exists', <add> NODE_EXTRA_CA_CERTS: `${common.fixturesDir}/no-such-file-exists`, <ide> }; <ide> <ide> const opts = { <ide><path>test/parallel/test-tls-env-extra-ca.js <ide> if (process.env.CHILD) { <ide> } <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> }; <ide> <ide> const server = tls.createServer(options, function(s) { <ide> const server = tls.createServer(options, function(s) { <ide> const env = { <ide> CHILD: 'yes', <ide> PORT: this.address().port, <del> NODE_EXTRA_CA_CERTS: common.fixturesDir + '/keys/ca1-cert.pem', <add> NODE_EXTRA_CA_CERTS: `${common.fixturesDir}/keys/ca1-cert.pem`, <ide> }; <ide> <ide> fork(__filename, {env: env}).on('exit', common.mustCall(function(status) { <ide><path>test/parallel/test-tls-fast-writing.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const dir = common.fixturesDir; <del>const options = { key: fs.readFileSync(dir + '/test_key.pem'), <del> cert: fs.readFileSync(dir + '/test_cert.pem'), <del> ca: [ fs.readFileSync(dir + '/test_ca.pem') ] }; <add>const options = { key: fs.readFileSync(`${dir}/test_key.pem`), <add> cert: fs.readFileSync(`${dir}/test_cert.pem`), <add> ca: [ fs.readFileSync(`${dir}/test_ca.pem`) ] }; <ide> <ide> const server = tls.createServer(options, onconnection); <ide> let gotChunk = false; <ide><path>test/parallel/test-tls-friendly-error-message.js <ide> const tls = require('tls'); <ide> <ide> const fs = require('fs'); <ide> <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`); <ide> <ide> tls.createServer({ key: key, cert: cert }, common.mustCall(function(conn) { <ide> conn.end(); <ide><path>test/parallel/test-tls-getcipher.js <ide> const fs = require('fs'); <ide> const cipher_list = ['AES128-SHA256', 'AES256-SHA256']; <ide> const cipher_version_pattern = /TLS|SSL/; <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`), <ide> ciphers: cipher_list.join(':'), <ide> honorCipherOrder: true <ide> }; <ide><path>test/parallel/test-tls-getprotocol.js <ide> const clientConfigs = [ <ide> ]; <ide> <ide> const serverConfig = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> const server = tls.createServer(serverConfig, 
common.mustCall(function() { <ide><path>test/parallel/test-tls-handshake-error.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> rejectUnauthorized: true <ide> }, function(c) { <ide> }).listen(0, common.mustCall(function() { <ide><path>test/parallel/test-tls-hello-parser-failure.js <ide> const net = require('net'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/test_key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/test_cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/test_key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/test_cert.pem`) <ide> }; <ide> <ide> const bonkers = Buffer.alloc(1024 * 1024, 42); <ide><path>test/parallel/test-tls-honorcipherorder.js <ide> process.on('exit', function() { <ide> function test(honorCipherOrder, clientCipher, expectedCipher, cb) { <ide> const soptions = { <ide> secureProtocol: SSL_Method, <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`), <ide> ciphers: 'AES256-SHA256:AES128-GCM-SHA256:AES128-SHA256:' + <ide> 'ECDHE-RSA-AES128-GCM-SHA256', <ide> honorCipherOrder: !!honorCipherOrder <ide><path>test/parallel/test-tls-interleave.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const dir = common.fixturesDir; <del>const options = { key: fs.readFileSync(dir + '/test_key.pem'), <del> cert: fs.readFileSync(dir + '/test_cert.pem'), <del> ca: [ fs.readFileSync(dir + '/test_ca.pem') ] }; <add>const options = { key: fs.readFileSync(`${dir}/test_key.pem`), <add> cert: fs.readFileSync(`${dir}/test_cert.pem`), <add> ca: [ fs.readFileSync(`${dir}/test_ca.pem`) ] }; <ide> <ide> const writes = [ <ide> 'some server data', <ide><path>test/parallel/test-tls-invoke-queued.js <ide> const fs = require('fs'); <ide> let received = ''; <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }, function(c) { <ide> c._write('hello ', null, function() { <ide> c._write('world!', null, function() { <ide><path>test/parallel/test-tls-js-stream.js <ide> const connected = { <ide> }; <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }, function(c) { <ide> console.log('new client'); <ide> connected.server++; <ide><path>test/parallel/test-tls-junk-closes-server.js <ide> const fs = require('fs'); <ide> const net = require('net'); <ide> <ide> const options = { <del> key: 
fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> const server = tls.createServer(options, common.mustNotCall()); <ide><path>test/parallel/test-tls-key-mismatch.js <ide> const errorMessageRegex = <ide> /^Error: error:0B080074:x509 certificate routines:X509_check_private_key:key values mismatch$/; <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> assert.throws(function() { <ide><path>test/parallel/test-tls-max-send-fragment.js <ide> let received = 0; <ide> const maxChunk = 768; <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }, function(c) { <ide> // Lower and upper limits <ide> assert(!c.setMaxSendFragment(511)); <ide><path>test/parallel/test-tls-multi-key.js <ide> const fs = require('fs'); <ide> <ide> const options = { <ide> key: [ <del> fs.readFileSync(common.fixturesDir + '/keys/ec-key.pem'), <del> fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <add> fs.readFileSync(`${common.fixturesDir}/keys/ec-key.pem`), <add> fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <ide> ], <ide> cert: [ <del> fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <del> fs.readFileSync(common.fixturesDir + '/keys/ec-cert.pem') <add> fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <add> fs.readFileSync(`${common.fixturesDir}/keys/ec-cert.pem`) <ide> ] <ide> }; <ide> <ide><path>test/parallel/test-tls-no-rsa-key.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/ec-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/ec-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/ec-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/ec-cert.pem`) <ide> }; <ide> <ide> const server = tls.createServer(options, function(conn) { <ide><path>test/parallel/test-tls-no-sslv3.js <ide> if (common.opensslCli === false) { <ide> return; <ide> } <ide> <del>const cert = fs.readFileSync(common.fixturesDir + '/test_cert.pem'); <del>const key = fs.readFileSync(common.fixturesDir + '/test_key.pem'); <add>const cert = fs.readFileSync(`${common.fixturesDir}/test_cert.pem`); <add>const key = fs.readFileSync(`${common.fixturesDir}/test_key.pem`); <ide> const server = tls.createServer({ cert: cert, key: key }, common.mustNotCall()); <ide> const errors = []; <ide> let stderr = ''; <ide> <ide> server.listen(0, '127.0.0.1', function() { <del> const address = this.address().address + ':' + this.address().port; <add> const address = `${this.address().address}:${this.address().port}`; <ide> const args = ['s_client', <ide> '-ssl3', <ide> '-connect', address]; <ide><path>test/parallel/test-tls-npn-server-client.js <ide> const 
tls = require('tls'); <ide> <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-on-empty-socket.js <ide> const net = require('net'); <ide> let out = ''; <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }, function(c) { <ide> c.end('hello'); <ide> }).listen(0, function() { <ide><path>test/parallel/test-tls-over-http-tunnel.js <ide> const http = require('http'); <ide> <ide> let gotRequest = false; <ide> <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`); <ide> <ide> const options = { <ide> key: key, <ide><path>test/parallel/test-tls-pause.js <ide> server.listen(0, function() { <ide> return process.nextTick(send); <ide> } <ide> sent += bufSize; <del> console.error('sent: ' + sent); <add> console.error(`sent: ${sent}`); <ide> resumed = true; <ide> client.resume(); <ide> console.error('resumed', client); <ide> server.listen(0, function() { <ide> console.error('received', received); <ide> console.error('sent', sent); <ide> if (received >= sent) { <del> console.error('received: ' + received); <add> console.error(`received: ${received}`); <ide> client.end(); <ide> server.close(); <ide> } <ide><path>test/parallel/test-tls-regr-gh-5108.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> <ide><path>test/parallel/test-tls-request-timeout.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = tls.Server(options, common.mustCall(function(socket) { <ide><path>test/parallel/test-tls-retain-handle-no-abort.js <ide> const util = require('util'); <ide> const sent = 'hello world'; <ide> const serverOptions = { <ide> isServer: true, <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> let ssl = null; <ide><path>test/parallel/test-tls-securepair-fiftharg.js <ide> const fs = require('fs'); <ide> const tls = require('tls'); <ide> <ide> const sslcontext = 
tls.createSecureContext({ <del> cert: fs.readFileSync(common.fixturesDir + '/test_cert.pem'), <del> key: fs.readFileSync(common.fixturesDir + '/test_key.pem') <add> cert: fs.readFileSync(`${common.fixturesDir}/test_cert.pem`), <add> key: fs.readFileSync(`${common.fixturesDir}/test_key.pem`) <ide> }); <ide> <ide> let catchedServername; <ide> const pair = tls.createSecurePair(sslcontext, true, false, false, { <ide> }); <ide> <ide> // captured traffic from browser's request to https://www.google.com <del>const sslHello = fs.readFileSync(common.fixturesDir + '/google_ssl_hello.bin'); <add>const sslHello = fs.readFileSync(`${common.fixturesDir}/google_ssl_hello.bin`); <ide> <ide> pair.encrypted.write(sslHello); <ide> <ide><path>test/parallel/test-tls-securepair-server.js <ide> const key = fs.readFileSync(join(common.fixturesDir, 'agent.key')).toString(); <ide> const cert = fs.readFileSync(join(common.fixturesDir, 'agent.crt')).toString(); <ide> <ide> function log(a) { <del> console.error('***server*** ' + a); <add> console.error(`***server*** ${a}`); <ide> } <ide> <ide> const server = net.createServer(common.mustCall(function(socket) { <del> log('connection fd=' + socket.fd); <add> log(`connection fd=${socket.fd}`); <ide> const sslcontext = tls.createSecureContext({key: key, cert: cert}); <ide> sslcontext.context.setCiphers('RC4-SHA:AES128-SHA:AES256-SHA'); <ide> <ide> const server = net.createServer(common.mustCall(function(socket) { <ide> }); <ide> <ide> pair.cleartext.on('data', function(data) { <del> log('read bytes ' + data.length); <add> log(`read bytes ${data.length}`); <ide> pair.cleartext.write(data); <ide> }); <ide> <ide><path>test/parallel/test-tls-server-connection-server.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = tls.createServer(options, function(s) { <ide><path>test/parallel/test-tls-server-verify.js <ide> const spawn = require('child_process').spawn; <ide> <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> <ide> function runClient(prefix, port, options, cb) { <ide> // - Certificate, but not signed by CA. <ide> // - Certificate signed by CA. 
<ide> <del> const args = ['s_client', '-connect', '127.0.0.1:' + port]; <add> const args = ['s_client', '-connect', `127.0.0.1:${port}`]; <ide> <ide> // for the performance issue in s_client on Windows <ide> if (common.isWindows) <ide> args.push('-no_rand_screen'); <ide> <del> console.log(prefix + ' connecting with', options.name); <add> console.log(`${prefix} connecting with`, options.name); <ide> <ide> switch (options.name) { <ide> case 'agent1': <ide> function runClient(prefix, port, options, cb) { <ide> break; <ide> <ide> default: <del> throw new Error(prefix + 'Unknown agent name'); <add> throw new Error(`${prefix}Unknown agent name`); <ide> } <ide> <ide> // To test use: openssl s_client -connect localhost:8000 <ide> function runClient(prefix, port, options, cb) { <ide> out += d; <ide> <ide> if (!goodbye && /_unauthed/g.test(out)) { <del> console.error(prefix + ' * unauthed'); <add> console.error(`${prefix} * unauthed`); <ide> goodbye = true; <ide> client.kill(); <ide> authed = false; <ide> rejected = false; <ide> } <ide> <ide> if (!goodbye && /_authed/g.test(out)) { <del> console.error(prefix + ' * authed'); <add> console.error(`${prefix} * authed`); <ide> goodbye = true; <ide> client.kill(); <ide> authed = true; <ide> function runClient(prefix, port, options, cb) { <ide> //client.stdout.pipe(process.stdout); <ide> <ide> client.on('exit', function(code) { <del> //assert.strictEqual(0, code, prefix + options.name + <del> // ": s_client exited with error code " + code); <add> //assert.strictEqual( <add> // 0, code, <add> // `${prefix}${options.name}: s_client exited with error code ${code}`); <ide> if (options.shouldReject) { <del> assert.strictEqual(true, rejected, prefix + options.name + <del> ' NOT rejected, but should have been'); <add> assert.strictEqual( <add> true, rejected, <add> `${prefix}${options.name} NOT rejected, but should have been`); <ide> } else { <del> assert.strictEqual(false, rejected, prefix + options.name + <del> ' rejected, but should NOT have been'); <del> assert.strictEqual(options.shouldAuth, authed, prefix + <del> options.name + ' authed is ' + authed + <del> ' but should have been ' + options.shouldAuth); <add> assert.strictEqual( <add> false, rejected, <add> `${prefix}${options.name} rejected, but should NOT have been`); <add> assert.strictEqual( <add> options.shouldAuth, authed, <add> `${prefix}${options.name} authed is ${authed} but should have been ${ <add> options.shouldAuth}`); <ide> } <ide> <ide> cb(); <ide> function runClient(prefix, port, options, cb) { <ide> // Run the tests <ide> let successfulTests = 0; <ide> function runTest(port, testIndex) { <del> const prefix = testIndex + ' '; <add> const prefix = `${testIndex} `; <ide> const tcase = testCases[testIndex]; <ide> if (!tcase) return; <ide> <del> console.error(prefix + "Running '%s'", tcase.title); <add> console.error(`${prefix}Running '%s'`, tcase.title); <ide> <ide> const cas = tcase.CAs.map(loadPEM); <ide> <ide> function runTest(port, testIndex) { <ide> if (tcase.renegotiate && !renegotiated) { <ide> renegotiated = true; <ide> setTimeout(function() { <del> console.error(prefix + '- connected, renegotiating'); <add> console.error(`${prefix}- connected, renegotiating`); <ide> c.write('\n_renegotiating\n'); <ide> return c.renegotiate({ <ide> requestCert: true, <ide> function runTest(port, testIndex) { <ide> } <ide> <ide> if (c.authorized) { <del> console.error(prefix + '- authed connection: ' + <del> c.getPeerCertificate().subject.CN); <add> console.error(`${prefix}- authed connection: ${ <add> 
c.getPeerCertificate().subject.CN}`); <ide> c.write('\n_authed\n'); <ide> } else { <del> console.error(prefix + '- unauthed connection: %s', c.authorizationError); <add> console.error(`${prefix}- unauthed connection: %s`, c.authorizationError); <ide> c.write('\n_unauthed\n'); <ide> } <ide> }); <ide> <ide> function runNextClient(clientIndex) { <ide> const options = tcase.clients[clientIndex]; <ide> if (options) { <del> runClient(prefix + clientIndex + ' ', port, options, function() { <add> runClient(`${prefix}${clientIndex} `, port, options, function() { <ide> runNextClient(clientIndex + 1); <ide> }); <ide> } else { <ide> function runTest(port, testIndex) { <ide> server.listen(port, function() { <ide> port = server.address().port; <ide> if (tcase.debug) { <del> console.error(prefix + 'TLS server running on port ' + port); <add> console.error(`${prefix}TLS server running on port ${port}`); <ide> } else { <ide> if (tcase.renegotiate) { <ide> runNextClient(0); <ide> } else { <ide> let clientsCompleted = 0; <ide> for (let i = 0; i < tcase.clients.length; i++) { <del> runClient(prefix + i + ' ', port, tcase.clients[i], function() { <add> runClient(`${prefix}${i} `, port, tcase.clients[i], function() { <ide> clientsCompleted++; <ide> if (clientsCompleted === tcase.clients.length) { <ide> server.close(); <ide><path>test/parallel/test-tls-set-ciphers.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`), <ide> ciphers: 'DES-CBC3-SHA' <ide> }; <ide> <ide> const server = tls.createServer(options, common.mustCall(function(conn) { <ide> })); <ide> <ide> server.listen(0, '127.0.0.1', function() { <del> let cmd = '"' + common.opensslCli + '" s_client -cipher ' + options.ciphers + <del> ` -connect 127.0.0.1:${this.address().port}`; <add> let cmd = `"${common.opensslCli}" s_client -cipher ${ <add> options.ciphers} -connect 127.0.0.1:${this.address().port}`; <ide> <ide> // for the performance and stability issue in s_client on Windows <ide> if (common.isWindows) <ide><path>test/parallel/test-tls-set-encoding.js <ide> const fs = require('fs'); <ide> <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`) <ide> }; <ide> <ide> // Contains a UTF8 only character <ide><path>test/parallel/test-tls-sni-option.js <ide> if (!common.hasCrypto) { <ide> const tls = require('tls'); <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-sni-server-client.js <ide> if (!common.hasCrypto) { <ide> const tls = require('tls'); <ide> <ide> function filenamePEM(n) { <del> return require('path').join(common.fixturesDir, 'keys', n + '.pem'); <add> return require('path').join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-socket-close.js <ide> const tls = 
require('tls'); <ide> const fs = require('fs'); <ide> const net = require('net'); <ide> <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent2-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent2-cert.pem`); <ide> <ide> let tlsSocket; <ide> // tls server <ide><path>test/parallel/test-tls-socket-destroy.js <ide> const fs = require('fs'); <ide> const net = require('net'); <ide> const tls = require('tls'); <ide> <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`); <ide> const secureContext = tls.createSecureContext({ key, cert }); <ide> <ide> const server = net.createServer(common.mustCall((conn) => { <ide><path>test/parallel/test-tls-startcom-wosign-whitelist.js <ide> const path = require('path'); <ide> let finished = 0; <ide> <ide> function filenamePEM(n) { <del> return path.join(common.fixturesDir, 'keys', n + '.pem'); <add> return path.join(common.fixturesDir, 'keys', `${n}.pem`); <ide> } <ide> <ide> function loadPEM(n) { <ide><path>test/parallel/test-tls-starttls-server.js <ide> const fs = require('fs'); <ide> const net = require('net'); <ide> const tls = require('tls'); <ide> <del>const key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'); <del>const cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'); <add>const key = fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`); <add>const cert = fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`); <ide> <ide> const server = net.createServer(common.mustCall((s) => { <ide> const tlsSocket = new tls.TLSSocket(s, { <ide><path>test/parallel/test-tls-ticket.js <ide> function createServer() { <ide> let previousKey = null; <ide> <ide> const server = tls.createServer({ <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> ticketKeys: keys <ide> }, function(c) { <ide> serverLog.push(id); <ide><path>test/parallel/test-tls-timeout-server-2.js <ide> const tls = require('tls'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = tls.createServer(options, common.mustCall(function(cleartext) { <ide><path>test/parallel/test-tls-timeout-server.js <ide> const net = require('net'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'), <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`), <ide> handshakeTimeout: 50 <ide> }; <ide> 
<ide><path>test/parallel/test-tls-two-cas-one-string.js <ide> function test(ca, next) { <ide> } <ide> <ide> const array = [ca1, ca2]; <del>const string = ca1 + '\n' + ca2; <add>const string = `${ca1}\n${ca2}`; <ide> test(array, () => test(string, common.noop)); <ide><path>test/parallel/test-tls-wrap-timeout.js <ide> const net = require('net'); <ide> const fs = require('fs'); <ide> <ide> const options = { <del> key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), <del> cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') <add> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), <add> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) <ide> }; <ide> <ide> const server = tls.createServer(options, common.mustCall((c) => { <ide><path>test/parallel/test-url-format.js <ide> for (const u in formatTests) { <ide> const actual = url.format(u); <ide> const actualObj = url.format(formatTests[u]); <ide> assert.strictEqual(actual, expect, <del> 'wonky format(' + u + ') == ' + expect + <del> '\nactual:' + actual); <add> `wonky format(${u}) == ${expect}\nactual:${actual}`); <ide> assert.strictEqual(actualObj, expect, <del> 'wonky format(' + JSON.stringify(formatTests[u]) + <del> ') == ' + expect + <del> '\nactual: ' + actualObj); <add> `wonky format(${JSON.stringify(formatTests[u])}) == ${ <add> expect}\nactual: ${actualObj}`); <ide> } <ide><path>test/parallel/test-url-parse-format.js <ide> for (const u in parseTests) { <ide> actual = url.format(parseTests[u]); <ide> <ide> assert.strictEqual(actual, expected, <del> 'format(' + u + ') == ' + u + '\nactual:' + actual); <add> `format(${u}) == ${u}\nactual:${actual}`); <ide> }
300
PHP
PHP
fix tests for php7.2 session_id problems
f541bdcb551d35a1894ae4624bb8d5e1429696d3
<ide><path>tests/TestCase/Controller/Component/SecurityComponentTest.php <ide> class SecurityComponentTest extends TestCase <ide> public function setUp() <ide> { <ide> parent::setUp(); <del> session_id('cli'); <ide> <ide> $this->server = $_SERVER; <ide> $session = new Session(); <ide><path>tests/TestCase/View/Helper/FormHelperTest.php <ide> class FormHelperTest extends TestCase <ide> public function setUp() <ide> { <ide> parent::setUp(); <del> session_id(''); <ide> <ide> Configure::write('Config.language', 'eng'); <ide> Configure::write('App.base', ''); <ide> public function testValidateHashNoModel() <ide> { <ide> $this->Form->request->params['_Token'] = 'foo'; <ide> <del> $result = $this->Form->secure(['anything']); <del> $this->assertRegExp('/b9731869b9915e3dee6250db1a1fad464371fb94/', $result); <add> $fields = ['anything']; <add> $result = $this->Form->secure($fields); <add> <add> $hash = hash_hmac('sha1', serialize($fields) . session_id(), Security::salt()); <add> $this->assertContains($hash, $result); <ide> } <ide> <ide> /** <ide> public function testFormSecurityFields() <ide> $this->Form->request->params['_Token'] = 'testKey'; <ide> $result = $this->Form->secure($fields); <ide> <del> $hash = hash_hmac('sha1', serialize($fields), Security::salt()); <add> $hash = hash_hmac('sha1', serialize($fields) . session_id(), Security::salt()); <ide> $hash .= ':' . 'Model.valid'; <ide> $hash = urlencode($hash); <ide> $tokenDebug = urlencode(json_encode([ <ide> public function testFormSecurityFieldsNoDebugMode() <ide> $this->Form->request->params['_Token'] = 'testKey'; <ide> $result = $this->Form->secure($fields); <ide> <del> $hash = hash_hmac('sha1', serialize($fields), Security::salt()); <add> $hash = hash_hmac('sha1', serialize($fields) . session_id(), Security::salt()); <ide> $hash .= ':' . 'Model.valid'; <ide> $hash = urlencode($hash); <ide> $expected = [ <ide> public function testFormSecurityMultipleFields() <ide> ]; <ide> $result = $this->Form->secure($fields); <ide> <del> $hash = '51e3b55a6edd82020b3f29c9ae200e14bbeb7ee5%3AModel.0.hidden%7CModel.0.valid'; <del> $hash = '16e544e04f6d3007231e3e23f8f73427a53272d4%3AModel.0.hidden%7CModel.0.valid'; <del> $hash .= '%7CModel.1.hidden%7CModel.1.valid'; <add> $hash = '8670192c3f040bf58680479060b4755b7a5c3596' . 
<add> '%3AModel.0.hidden%7CModel.0.valid%7CModel.1.hidden%7CModel.1.valid'; <ide> $tokenDebug = urlencode(json_encode([ <ide> '', <ide> $fields, <ide> public function testFormSecurityMultipleControlFields() <ide> $this->Form->control('Addresses.1.primary', ['type' => 'checkbox']); <ide> <ide> $result = $this->Form->secure($this->Form->fields); <del> $hash = '587942c6810603a6d5a07a394316dda455580227%3AAddresses.0.id%7CAddresses.1.id'; <add> $hash = 'a4fe49bde94894a01375e7aa2873ea8114a96471%3AAddresses.0.id%7CAddresses.1.id'; <ide> $tokenDebug = urlencode(json_encode([ <ide> '/articles/add', <ide> [ <ide> public function testFormSecurityMultipleControlDisabledFields() <ide> $this->Form->text('Addresses.1.phone'); <ide> <ide> $result = $this->Form->secure($this->Form->fields); <del> $hash = '8db4b5f1a912dfafd9c264964df7aa598ea322c0%3AAddresses.0.id%7CAddresses.1.id'; <add> $hash = '43c4db25e4162c5e4edd9dea51f5f9d9d92215ec%3AAddresses.0.id%7CAddresses.1.id'; <ide> $tokenDebug = urlencode(json_encode([ <ide> '/articles/add', <ide> [ <ide> public function testFormSecurityControlUnlockedFields() <ide> <ide> $result = $this->Form->secure($expected, ['data-foo' => 'bar']); <ide> <del> $hash = 'cdc8fa2dd2aa2804c12cd17279c39747f1c57354%3AAddresses.id'; <add> $hash = 'f98315a7d5515e5ae32e35f7d680207c085fae69%3AAddresses.id'; <ide> $tokenDebug = urlencode(json_encode([ <ide> '/articles/add', <ide> [ <ide> public function testFormSecurityControlUnlockedFieldsDebugSecurityTrue() <ide> $this->assertEquals($expected, $result); <ide> $result = $this->Form->secure($expected, ['data-foo' => 'bar', 'debugSecurity' => true]); <ide> <del> $hash = 'cdc8fa2dd2aa2804c12cd17279c39747f1c57354%3AAddresses.id'; <add> $hash = 'f98315a7d5515e5ae32e35f7d680207c085fae69%3AAddresses.id'; <ide> $tokenDebug = urlencode(json_encode([ <ide> '/articles/add', <ide> [ <ide> public function testFormSecurityControlUnlockedFieldsDebugSecurityDebugFalse() <ide> Configure::write('debug', false); <ide> $result = $this->Form->secure($expected, ['data-foo' => 'bar', 'debugSecurity' => true]); <ide> <del> $hash = 'cdc8fa2dd2aa2804c12cd17279c39747f1c57354%3AAddresses.id'; <add> $hash = 'f98315a7d5515e5ae32e35f7d680207c085fae69%3AAddresses.id'; <ide> $expected = [ <ide> 'div' => ['style' => 'display:none;'], <ide> ['input' => [ <ide> public function testFormSecurityControlUnlockedFieldsDebugSecurityFalse() <ide> $this->assertEquals($expected, $result); <ide> <ide> $result = $this->Form->secure($expected, ['data-foo' => 'bar', 'debugSecurity' => false]); <del> <del> $hash = 'cdc8fa2dd2aa2804c12cd17279c39747f1c57354%3AAddresses.id'; <add> $hash = 'f98315a7d5515e5ae32e35f7d680207c085fae69%3AAddresses.id'; <ide> <ide> $expected = [ <ide> 'div' => ['style' => 'display:none;'], <ide> public function testSecuredFormUrlIgnoresHost() <ide> { <ide> $this->Form->request->params['_Token'] = ['key' => 'testKey']; <ide> <del> $expected = '8312b8faa7e74c6f36e05c8d188eda58b39fab20%3A'; <add> $expected = '2548654895b160d724042ed269a2a863fd9d66ee%3A'; <ide> $this->Form->create($this->article, [ <ide> 'url' => ['controller' => 'articles', 'action' => 'view', 1, '?' 
=> ['page' => 1]] <ide> ]); <ide> public function testSecuredFormUrlHasHtmlAndIdentifier() <ide> { <ide> $this->Form->request->params['_Token'] = ['key' => 'testKey']; <ide> <del> $expected = '93acdc2336947d62cf057a17275264c1fecc2443%3A'; <add> $expected = '0a913f45b887b4d9cc2650ef1edc50183896959c%3A'; <ide> $this->Form->create($this->article, [ <ide> 'url' => [ <ide> 'controller' => 'articles', <ide> public function testSelectMultipleCheckboxSecurity() <ide> $this->assertEquals(['Model.multi_field'], $this->Form->fields); <ide> <ide> $result = $this->Form->secure($this->Form->fields); <del> $key = '3cecbba5b65c8792d963b0498c67741466e61d47%3A'; <del> $this->assertRegExp('/"' . $key . '"/', $result); <add> $key = '8af36fb34e6f2ef8ba0eb473bb4365ec232f3fe5%3A'; <add> $this->assertContains('"' . $key . '"', $result); <ide> } <ide> <ide> /** <ide> public function testPostLinkWithData() <ide> */ <ide> public function testPostLinkSecurityHash() <ide> { <del> $hash = hash_hmac('sha1', '/posts/delete/1' . serialize(['id' => '1']), Security::getSalt()); <add> $hash = hash_hmac('sha1', '/posts/delete/1' . serialize(['id' => '1']) . session_id(), Security::getSalt()); <ide> $hash .= '%3Aid'; <ide> $this->Form->request->params['_Token']['key'] = 'test'; <ide> <ide> public function testPostLinkSecurityHash() <ide> */ <ide> public function testPostLinkSecurityHashBlockMode() <ide> { <del> $hash = hash_hmac('sha1', '/posts/delete/1' . serialize([]), Security::getSalt()); <add> $hash = hash_hmac('sha1', '/posts/delete/1' . serialize([]) . session_id(), Security::getSalt()); <ide> $hash .= '%3A'; <ide> $this->Form->request->params['_Token']['key'] = 'test'; <ide> <ide> public function testPostLinkSecurityHashBlockMode() <ide> public function testPostLinkSecurityHashNoDebugMode() <ide> { <ide> Configure::write('debug', false); <del> $hash = hash_hmac('sha1', '/posts/delete/1' . serialize(['id' => '1']), Security::getSalt()); <add> $hash = hash_hmac('sha1', '/posts/delete/1' . serialize(['id' => '1']) . session_id(), Security::getSalt()); <ide> $hash .= '%3Aid'; <ide> $this->Form->request->params['_Token']['key'] = 'test'; <ide> <ide><path>tests/bootstrap.php <ide> class_alias('PHPUnit_Framework_Error', 'PHPUnit\Framework\Error\Error'); <ide> class_alias('PHPUnit_Framework_Error_Warning', 'PHPUnit\Framework\Error\Warning'); <ide> class_alias('PHPUnit_Framework_ExpectationFailedException', 'PHPUnit\Framework\ExpectationFailedException'); <ide> } <add> <add>// Fixate sessionid early on, as php7.2+ <add>// does not allow the sessionid to be set after stdout <add>// has been written to. <add>session_id('cli');
3
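The CakePHP patch above fixates `session_id('cli')` in the test bootstrap because the form-tampering token is now an HMAC-SHA1 over the serialized field list with the session id appended, so every expected hash in the tests changes. A minimal Python sketch of that construction (the helper name and salt are illustrative, and `repr` stands in for PHP's `serialize`):

import hashlib
import hmac

def form_token(fields, session_id, salt):
    # HMAC-SHA1 over the serialized locked fields plus the current session id.
    message = (repr(fields) + session_id).encode()
    digest = hmac.new(salt.encode(), message, hashlib.sha1).hexdigest()
    # As in the patch, the locked field names are appended after a ':' separator.
    return digest + ":" + "|".join(fields)

# Same fields and salt, but a different session id yields a different token.
print(form_token(["Model.valid"], "cli", "app-salt"))
print(form_token(["Model.valid"], "other-session", "app-salt"))

Because the session id is part of the signed message, a token captured in one session cannot be replayed in another, which is the point of the change.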
Ruby
Ruby
unify param.require tests
aae53d2175b306bf9e085a86d9e5d8a194fcf63b
<ide><path>actionpack/test/controller/parameters/parameters_require_test.rb <del>require 'abstract_unit' <del>require 'action_controller/metal/strong_parameters' <del> <del>class ParametersRequireTest < ActiveSupport::TestCase <del> test "required parameters must be present not merely not nil" do <del> assert_raises(ActionController::ParameterMissing) do <del> ActionController::Parameters.new(person: {}).require(:person) <del> end <del> end <del>end <ide><path>actionpack/test/controller/required_params_test.rb <ide> class ActionControllerRequiredParamsTest < ActionController::TestCase <ide> assert_response :ok <ide> end <ide> end <add> <add>class ParametersRequireTest < ActiveSupport::TestCase <add> test "required parameters must be present not merely not nil" do <add> assert_raises(ActionController::ParameterMissing) do <add> ActionController::Parameters.new(person: {}).require(:person) <add> end <add> end <add>end
2
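The relocated Rails test above pins down the semantics of `require`: the parameter must be present, not merely non-nil, so an empty hash still raises `ParameterMissing`. A rough Python analogue of that rule (names are invented for illustration, this is not the Action Controller implementation):

class ParameterMissing(KeyError):
    pass

def require_param(params, key):
    # Present means not None and not an empty collection or string.
    value = params.get(key)
    if value is None or (hasattr(value, "__len__") and len(value) == 0):
        raise ParameterMissing(key)
    return value

require_param({"person": {"name": "Ada"}}, "person")  # returns the nested dict
require_param({"person": {}}, "person")               # raises ParameterMissing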
PHP
PHP
add tests for method retrievechannelsoptions
304de5fc046c16eb08ab1415cc7755981ac1f3cb
<ide><path>tests/Broadcasting/BroadcasterTest.php <ide> public function testCanRegisterChannelsWithOptions() <ide> <ide> $options = [ 'a' => [ 'b', 'c' ] ]; <ide> $broadcaster->channel('somechannel', function () {}, $options); <add> } <add> <add> public function testCanRetrieveChannelsOptions() <add> { <add> $broadcaster = new FakeBroadcaster; <add> <add> $options = [ 'a' => [ 'b', 'c' ] ]; <add> $broadcaster->channel('somechannel', function () {}, $options); <ide> <ide> $this->assertEquals( <ide> $options, <ide> $broadcaster->retrieveChannelOptions('somechannel') <ide> ); <ide> } <ide> <add> public function testCanRetrieveChannelsOptionsUsingAChannelNameContainingArgs() <add> { <add> $broadcaster = new FakeBroadcaster; <add> <add> $options = [ 'a' => [ 'b', 'c' ] ]; <add> $broadcaster->channel('somechannel.{id}.test.{text}', function () {}, $options); <add> <add> $this->assertEquals( <add> $options, <add> $broadcaster->retrieveChannelOptions('somechannel.23.test.mytext') <add> ); <add> } <add> <add> public function testCanRetrieveChannelsOptionsWhenMultipleChannelsAreRegistered() <add> { <add> $broadcaster = new FakeBroadcaster; <add> <add> $options = [ 'a' => [ 'b', 'c' ] ]; <add> $broadcaster->channel('somechannel', function () {}); <add> $broadcaster->channel('someotherchannel', function () {}, $options); <add> <add> $this->assertEquals( <add> $options, <add> $broadcaster->retrieveChannelOptions('someotherchannel') <add> ); <add> } <add> <add> public function testDontRetrieveChannelsOptionsWhenChannelDoesntExists() <add> { <add> $broadcaster = new FakeBroadcaster; <add> <add> $options = [ 'a' => [ 'b', 'c' ] ]; <add> $broadcaster->channel('somechannel', function () {}, $options); <add> <add> $this->assertEquals( <add> [], <add> $broadcaster->retrieveChannelOptions('someotherchannel') <add> ); <add> } <add> <ide> public function testRetrieveUserWithoutGuard() <ide> { <ide> $broadcaster = new FakeBroadcaster;
1
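The new Laravel tests above look up options for a channel registered with placeholders (`somechannel.{id}.test.{text}`) using a concrete name (`somechannel.23.test.mytext`), which means the broadcaster has to treat each `{placeholder}` segment as a wildcard when matching. A hedged Python sketch of such a lookup (the regex translation is an assumption about the mechanism, not code taken from the framework):

import re

def pattern_to_regex(pattern):
    # Split on "{...}" placeholders, escape the literal pieces, and join them
    # with a group that matches one dot-free segment.
    parts = re.split(r"\{[^}]+\}", pattern)
    return "([^.]+)".join(re.escape(part) for part in parts)

def channel_options(registered, channel_name):
    for pattern, options in registered.items():
        if re.fullmatch(pattern_to_regex(pattern), channel_name):
            return options
    return {}

registered = {"somechannel.{id}.test.{text}": {"a": ["b", "c"]}}
print(channel_options(registered, "somechannel.23.test.mytext"))  # {'a': ['b', 'c']}
print(channel_options(registered, "someotherchannel"))            # {}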
Javascript
Javascript
fix indentation of polyfills/require.js
5b3920567d98f4b6219b6752dd06e1a5a82f9755
<ide><path>packager/src/Resolver/polyfills/require.js <ide> * @flow <ide> */ <ide> <del> 'use strict'; <add>'use strict'; <ide> <del> declare var __DEV__: boolean; <add>declare var __DEV__: boolean; <ide> <del> type DependencyMap = Array<ModuleID>; <del> type Exports = any; <del> type FactoryFn = ( <add>type DependencyMap = Array<ModuleID>; <add>type Exports = any; <add>type FactoryFn = ( <ide> global: Object, <ide> require: RequireFn, <ide> moduleObject: {exports: {}}, <ide> exports: {}, <ide> dependencyMap: ?DependencyMap, <ide> ) => void; <del> type HotModuleReloadingAcceptFn = Function; <del> type HotModuleReloadingData = {| <add>type HotModuleReloadingAcceptFn = Function; <add>type HotModuleReloadingData = {| <ide> acceptCallback: ?HotModuleReloadingAcceptFn, <ide> accept: (callback: HotModuleReloadingAcceptFn) => void, <ide> |}; <del> type Module = { <add>type Module = { <ide> exports: Exports, <ide> hot?: HotModuleReloadingData, <ide> }; <del> type ModuleID = number; <del> type ModuleDefinition = {| <add>type ModuleID = number; <add>type ModuleDefinition = {| <ide> dependencyMap: ?DependencyMap, <ide> exports: Exports, <ide> factory: FactoryFn, <ide> isInitialized: boolean, <ide> verboseName?: string, <ide> |}; <del> type ModuleMap = <add>type ModuleMap = <ide> {[key: ModuleID]: (ModuleDefinition)}; <del> type RequireFn = (id: ModuleID | VerboseModuleNameForDev) => Exports; <del> type VerboseModuleNameForDev = string; <add>type RequireFn = (id: ModuleID | VerboseModuleNameForDev) => Exports; <add>type VerboseModuleNameForDev = string; <ide> <del> global.require = require; <del> global.__d = define; <add>global.require = require; <add>global.__d = define; <ide> <del> const modules: ModuleMap = Object.create(null); <del> if (__DEV__) { <del> var verboseNamesToModuleIds: {[key: string]: number} = Object.create(null); <del> } <add>const modules: ModuleMap = Object.create(null); <add>if (__DEV__) { <add> var verboseNamesToModuleIds: {[key: string]: number} = Object.create(null); <add>} <ide> <del> function define( <add>function define( <ide> factory: FactoryFn, <ide> moduleId: number, <ide> dependencyMap?: DependencyMap, <ide> ) { <del> if (moduleId in modules) { <add> if (moduleId in modules) { <ide> // prevent repeated calls to `global.nativeRequire` to overwrite modules <ide> // that are already loaded <del> return; <del> } <del> modules[moduleId] = { <del> dependencyMap, <del> exports: undefined, <del> factory, <del> hasError: false, <del> isInitialized: false, <del> }; <del> if (__DEV__) { <add> return; <add> } <add> modules[moduleId] = { <add> dependencyMap, <add> exports: undefined, <add> factory, <add> hasError: false, <add> isInitialized: false, <add> }; <add> if (__DEV__) { <ide> // HMR <del> modules[moduleId].hot = createHotReloadingObject(); <add> modules[moduleId].hot = createHotReloadingObject(); <ide> <ide> // DEBUGGABLE MODULES NAMES <ide> // we take `verboseName` from `arguments` to avoid an unused named parameter <ide> // in `define` in production. 
<del> const verboseName: string | void = arguments[3]; <del> if (verboseName) { <del> modules[moduleId].verboseName = verboseName; <del> verboseNamesToModuleIds[verboseName] = moduleId; <del> } <del> } <del> } <del> <del> function require(moduleId: ModuleID | VerboseModuleNameForDev) { <del> if (__DEV__ && typeof moduleId === 'string') { <del> const verboseName = moduleId; <del> moduleId = verboseNamesToModuleIds[moduleId]; <del> if (moduleId == null) { <del> throw new Error(`Unknown named module: '${verboseName}'`); <del> } else { <del> console.warn( <add> const verboseName: string | void = arguments[3]; <add> if (verboseName) { <add> modules[moduleId].verboseName = verboseName; <add> verboseNamesToModuleIds[verboseName] = moduleId; <add> } <add> } <add>} <add> <add>function require(moduleId: ModuleID | VerboseModuleNameForDev) { <add> if (__DEV__ && typeof moduleId === 'string') { <add> const verboseName = moduleId; <add> moduleId = verboseNamesToModuleIds[moduleId]; <add> if (moduleId == null) { <add> throw new Error(`Unknown named module: '${verboseName}'`); <add> } else { <add> console.warn( <ide> `Requiring module '${verboseName}' by name is only supported for ` + <ide> 'debugging purposes and will BREAK IN PRODUCTION!' <ide> ); <del> } <del> } <add> } <add> } <ide> <ide> //$FlowFixMe: at this point we know that moduleId is a number <del> const moduleIdReallyIsNumber: number = moduleId; <del> const module = modules[moduleIdReallyIsNumber]; <del> return module && module.isInitialized <add> const moduleIdReallyIsNumber: number = moduleId; <add> const module = modules[moduleIdReallyIsNumber]; <add> return module && module.isInitialized <ide> ? module.exports <ide> : guardedLoadModule(moduleIdReallyIsNumber, module); <del> } <del> <del> let inGuard = false; <del> function guardedLoadModule(moduleId: ModuleID, module) { <del> if (!inGuard && global.ErrorUtils) { <del> inGuard = true; <del> let returnValue; <del> try { <del> returnValue = loadModuleImplementation(moduleId, module); <del> } catch (e) { <del> global.ErrorUtils.reportFatalError(e); <del> } <del> inGuard = false; <del> return returnValue; <del> } else { <del> return loadModuleImplementation(moduleId, module); <del> } <del> } <del> <del> function loadModuleImplementation(moduleId, module) { <del> const nativeRequire = global.nativeRequire; <del> if (!module && nativeRequire) { <del> nativeRequire(moduleId); <del> module = modules[moduleId]; <del> } <del> <del> if (!module) { <del> throw unknownModuleError(moduleId); <del> } <del> <del> if (module.hasError) { <del> throw moduleThrewError(moduleId); <del> } <add>} <add> <add>let inGuard = false; <add>function guardedLoadModule(moduleId: ModuleID, module) { <add> if (!inGuard && global.ErrorUtils) { <add> inGuard = true; <add> let returnValue; <add> try { <add> returnValue = loadModuleImplementation(moduleId, module); <add> } catch (e) { <add> global.ErrorUtils.reportFatalError(e); <add> } <add> inGuard = false; <add> return returnValue; <add> } else { <add> return loadModuleImplementation(moduleId, module); <add> } <add>} <add> <add>function loadModuleImplementation(moduleId, module) { <add> const nativeRequire = global.nativeRequire; <add> if (!module && nativeRequire) { <add> nativeRequire(moduleId); <add> module = modules[moduleId]; <add> } <add> <add> if (!module) { <add> throw unknownModuleError(moduleId); <add> } <add> <add> if (module.hasError) { <add> throw moduleThrewError(moduleId); <add> } <ide> <ide> // `require` calls int the require polyfill itself are not analyzed 
and <ide> // replaced so that they use numeric module IDs. <ide> // The systrace module will expose itself on the require function so that <ide> // it can be used here. <ide> // TODO(davidaurelio) Scan polyfills for dependencies, too (t9759686) <del> if (__DEV__) { <del> var {Systrace} = require; <del> } <add> if (__DEV__) { <add> var {Systrace} = require; <add> } <ide> <ide> // We must optimistically mark module as initialized before running the <ide> // factory to keep any require cycles inside the factory from causing an <ide> // infinite require loop. <del> module.isInitialized = true; <del> const exports = module.exports = {}; <del> const {factory, dependencyMap} = module; <del> try { <del> if (__DEV__) { <add> module.isInitialized = true; <add> const exports = module.exports = {}; <add> const {factory, dependencyMap} = module; <add> try { <add> if (__DEV__) { <ide> // $FlowFixMe: we know that __DEV__ is const and `Systrace` exists <del> Systrace.beginEvent('JS_require_' + (module.verboseName || moduleId)); <del> } <add> Systrace.beginEvent('JS_require_' + (module.verboseName || moduleId)); <add> } <ide> <del> const moduleObject: Module = {exports}; <del> if (__DEV__ && module.hot) { <del> moduleObject.hot = module.hot; <del> } <add> const moduleObject: Module = {exports}; <add> if (__DEV__ && module.hot) { <add> moduleObject.hot = module.hot; <add> } <ide> <ide> // keep args in sync with with defineModuleCode in <ide> // packager/src//Resolver/index.js <ide> // and packager/src//ModuleGraph/worker.js <del> factory(global, require, moduleObject, exports, dependencyMap); <add> factory(global, require, moduleObject, exports, dependencyMap); <ide> <ide> // avoid removing factory in DEV mode as it breaks HMR <del> if (!__DEV__) { <add> if (!__DEV__) { <ide> // $FlowFixMe: This is only sound because we never access `factory` again <del> module.factory = undefined; <del> } <add> module.factory = undefined; <add> } <ide> <del> if (__DEV__) { <add> if (__DEV__) { <ide> // $FlowFixMe: we know that __DEV__ is const and `Systrace` exists <del> Systrace.endEvent(); <del> } <del> return (module.exports = moduleObject.exports); <del> } catch (e) { <del> module.hasError = true; <del> module.isInitialized = false; <del> module.exports = undefined; <del> throw e; <del> } <del> } <del> <del> function unknownModuleError(id) { <del> let message = 'Requiring unknown module "' + id + '".'; <del> if (__DEV__) { <del> message += <add> Systrace.endEvent(); <add> } <add> return (module.exports = moduleObject.exports); <add> } catch (e) { <add> module.hasError = true; <add> module.isInitialized = false; <add> module.exports = undefined; <add> throw e; <add> } <add>} <add> <add>function unknownModuleError(id) { <add> let message = 'Requiring unknown module "' + id + '".'; <add> if (__DEV__) { <add> message += <ide> 'If you are sure the module is there, try restarting the packager. 
' + <ide> 'You may also want to run `npm install`, or `yarn` (depending on your environment).'; <del> } <del> return Error(message); <del> } <add> } <add> return Error(message); <add>} <ide> <del> function moduleThrewError(id) { <del> return Error('Requiring module "' + id + '", which threw an exception.'); <del> } <add>function moduleThrewError(id) { <add> return Error('Requiring module "' + id + '", which threw an exception.'); <add>} <ide> <del> if (__DEV__) { <del> require.Systrace = {beginEvent: () => {}, endEvent: () => {}}; <add>if (__DEV__) { <add> require.Systrace = {beginEvent: () => {}, endEvent: () => {}}; <ide> <ide> // HOT MODULE RELOADING <del> var createHotReloadingObject = function() { <del> const hot: HotModuleReloadingData = { <del> acceptCallback: null, <del> accept: callback => { hot.acceptCallback = callback; }, <del> }; <del> return hot; <del> }; <del> <del> const acceptAll = function( <add> var createHotReloadingObject = function() { <add> const hot: HotModuleReloadingData = { <add> acceptCallback: null, <add> accept: callback => { hot.acceptCallback = callback; }, <add> }; <add> return hot; <add> }; <add> <add> const acceptAll = function( <ide> dependentModules, <ide> inverseDependencies, <ide> ) { <del> if (!dependentModules || dependentModules.length === 0) { <del> return true; <del> } <add> if (!dependentModules || dependentModules.length === 0) { <add> return true; <add> } <ide> <del> const notAccepted = dependentModules.filter( <add> const notAccepted = dependentModules.filter( <ide> module => !accept(module, /*factory*/ undefined, inverseDependencies)); <ide> <del> const parents = []; <del> for (let i = 0; i < notAccepted.length; i++) { <add> const parents = []; <add> for (let i = 0; i < notAccepted.length; i++) { <ide> // if the module has no parents then the change cannot be hot loaded <del> if (inverseDependencies[notAccepted[i]].length === 0) { <del> return false; <del> } <add> if (inverseDependencies[notAccepted[i]].length === 0) { <add> return false; <add> } <ide> <del> parents.push(...inverseDependencies[notAccepted[i]]); <del> } <add> parents.push(...inverseDependencies[notAccepted[i]]); <add> } <ide> <del> return acceptAll(parents, inverseDependencies); <del> }; <add> return acceptAll(parents, inverseDependencies); <add> }; <ide> <del> const accept = function( <add> const accept = function( <ide> id: ModuleID, <ide> factory?: FactoryFn, <ide> inverseDependencies: {[key: ModuleID]: Array<ModuleID>}, <ide> ) { <del> const mod = modules[id]; <add> const mod = modules[id]; <ide> <del> if (!mod && factory) { // new modules need a factory <del> define(factory, id); <del> return true; // new modules don't need to be accepted <del> } <add> if (!mod && factory) { // new modules need a factory <add> define(factory, id); <add> return true; // new modules don't need to be accepted <add> } <ide> <del> const {hot} = mod; <del> if (!hot) { <del> console.warn( <add> const {hot} = mod; <add> if (!hot) { <add> console.warn( <ide> 'Cannot accept module because Hot Module Replacement ' + <ide> 'API was not installed.' 
<ide> ); <del> return false; <del> } <add> return false; <add> } <ide> <ide> // replace and initialize factory <del> if (factory) { <del> mod.factory = factory; <del> } <del> mod.hasError = false; <del> mod.isInitialized = false; <del> require(id); <del> <del> if (hot.acceptCallback) { <del> hot.acceptCallback(); <del> return true; <del> } else { <add> if (factory) { <add> mod.factory = factory; <add> } <add> mod.hasError = false; <add> mod.isInitialized = false; <add> require(id); <add> <add> if (hot.acceptCallback) { <add> hot.acceptCallback(); <add> return true; <add> } else { <ide> // need to have inverseDependencies to bubble up accept <del> if (!inverseDependencies) { <del> throw new Error('Undefined `inverseDependencies`'); <del> } <add> if (!inverseDependencies) { <add> throw new Error('Undefined `inverseDependencies`'); <add> } <ide> <ide> // accept parent modules recursively up until all siblings are accepted <del> return acceptAll(inverseDependencies[id], inverseDependencies); <del> } <del> }; <add> return acceptAll(inverseDependencies[id], inverseDependencies); <add> } <add> }; <ide> <del> global.__accept = accept; <del> } <add> global.__accept = accept; <add>}
1
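The React Native polyfill above (the commit itself only re-indents it) is a textbook module registry: `define` records a factory without executing it, and `require` runs the factory once, caching `exports` and optimistically marking the module initialized before the factory runs so that require cycles terminate. A compact Python sketch of that caching pattern, purely illustrative:

modules = {}

def define(module_id, factory):
    # Register the factory; nothing executes until the module is first required.
    modules.setdefault(module_id, {"factory": factory, "exports": None, "initialized": False})

def require(module_id):
    mod = modules[module_id]
    if mod["initialized"]:
        return mod["exports"]
    # Publish an empty exports object up front so a cyclic require sees a
    # partially-initialized module instead of recursing forever.
    mod["initialized"] = True
    mod["exports"] = {}
    mod["factory"](require, mod["exports"])
    return mod["exports"]

define("greet", lambda require, exports: exports.update(hello=lambda: "hi"))
print(require("greet")["hello"]())  # hi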
Ruby
Ruby
remove new line between doc and method
164be37da5bdcf99a941ab221b90d8c8adfd1a82
<ide><path>actionpack/lib/action_dispatch/request/session.rb <ide> class Session # :nodoc: <ide> Unspecified = Object.new <ide> <ide> # Creates a session hash, merging the properties of the previous session if any <del> <ide> def self.create(store, env, default_options) <ide> session_was = find env <ide> session = Request::Session.new(store, env)
1
Ruby
Ruby
fix xmlschema output with fraction_digits >0
c2b79c011f4ec39426c93812ab58660718c7cb92
<ide><path>activesupport/lib/active_support/time_with_zone.rb <ide> def inspect <ide> <ide> def xmlschema(fraction_digits = 0) <ide> fraction = if fraction_digits > 0 <del> ".%i" % time.usec.to_s[0, fraction_digits] <add> (".%06i" % time.usec)[0, fraction_digits + 1] <ide> end <ide> <ide> "#{time.strftime("%Y-%m-%dT%H:%M:%S")}#{fraction}#{formatted_offset(true, 'Z')}" <ide><path>activesupport/test/core_ext/time_with_zone_test.rb <ide> def test_xmlschema_with_fractional_seconds <ide> assert_equal "1999-12-31T19:00:00.123456-05:00", @twz.xmlschema(12) <ide> end <ide> <add> def test_xmlschema_with_fractional_seconds_lower_than_hundred_thousand <add> @twz += 0.001234 # advance the time by a fraction <add> assert_equal "1999-12-31T19:00:00.001-05:00", @twz.xmlschema(3) <add> assert_equal "1999-12-31T19:00:00.001234-05:00", @twz.xmlschema(6) <add> assert_equal "1999-12-31T19:00:00.001234-05:00", @twz.xmlschema(12) <add> end <add> <ide> def test_to_yaml <ide> assert_match(/^--- 2000-01-01 00:00:00(\.0+)?\s*Z\n/, @twz.to_yaml) <ide> end
2
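The Active Support fix above only shows up when the fractional part has leading zeros: formatting `usec` as a plain integer and then slicing drops those zeros, so 1234 microseconds printed with three digits came out as `.123` instead of `.001`. Zero-padding to six digits before slicing keeps every digit in its place, which is what the new `(".%06i" % time.usec)[0, fraction_digits + 1]` does. The same comparison in Python:

usec = 1234  # 0.001234 seconds

def old_fraction(usec, digits):
    return "." + ("%i" % usec)[:digits]       # ".123"  (wrong: leading zeros lost)

def new_fraction(usec, digits):
    return (".%06i" % usec)[:digits + 1]      # ".001"  (correct)

print(old_fraction(usec, 3), new_fraction(usec, 3))   # .123 .001
print(old_fraction(usec, 6), new_fraction(usec, 6))   # .1234 .001234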
Text
Text
add missing set_visibility_filter import
9df3c15023e3362959d7e56aa36989f1e84a46c7
<ide><path>docs/basics/Reducers.md <ide> Note that: <ide> We have two more actions to handle! Just like we did with `SET_VISIBILITY_FILTER`, we'll import the `ADD_TODO` and `TOGGLE_TODO` actions and then extend our reducer to handle `ADD_TODO`. <ide> <ide> ```js <del>import { VisibilityFilters, ADD_TODO, TOGGLE_TODO } from './actions' <add>import { <add> ADD_TODO, <add> TOGGLE_TODO, <add> SET_VISIBILITY_FILTER, <add> VisibilityFilters <add>} from './actions' <ide> <ide> ... <ide>
1
Java
Java
remove exception declaration in abstract encoder
aaa128180958d4b6d420cb7e4a955bb2edab0c0b
<ide><path>spring-core/src/main/java/org/springframework/core/codec/AbstractSingleValueEncoder.java <ide> public final Flux<DataBuffer> encode(Publisher<? extends T> inputStream, DataBuf <ide> <ide> return Flux.from(inputStream). <ide> take(1). <del> concatMap(t -> { <del> try { <del> return encode(t, bufferFactory, elementType, mimeType, hints); <del> } <del> catch (Exception ex) { <del> return Flux.error(ex); <del> } <del> }); <add> concatMap(t -> encode(t, bufferFactory, elementType, mimeType, hints)); <ide> } <ide> <ide> /** <ide> public final Flux<DataBuffer> encode(Publisher<? extends T> inputStream, DataBuf <ide> * @param mimeType the mime type to process <ide> * @param hints Additional information about how to do decode, optional <ide> * @return the output stream <del> * @throws Exception in case of errors <ide> */ <ide> protected abstract Flux<DataBuffer> encode(T t, DataBufferFactory dataBufferFactory, <del> ResolvableType type, MimeType mimeType, Map<String, Object> hints) throws Exception; <add> ResolvableType type, MimeType mimeType, Map<String, Object> hints); <ide> <ide> } <ide><path>spring-core/src/main/java/org/springframework/core/codec/ResourceEncoder.java <ide> public boolean canEncode(ResolvableType elementType, MimeType mimeType, Map<Stri <ide> <ide> @Override <ide> protected Flux<DataBuffer> encode(Resource resource, DataBufferFactory dataBufferFactory, <del> ResolvableType type, MimeType mimeType, Map<String, Object> hints) throws IOException { <add> ResolvableType type, MimeType mimeType, Map<String, Object> hints) { <ide> <del> ReadableByteChannel channel = resource.readableChannel(); <del> return DataBufferUtils.read(channel, dataBufferFactory, bufferSize); <add> try { <add> ReadableByteChannel channel = resource.readableChannel(); <add> return DataBufferUtils.read(channel, dataBufferFactory, this.bufferSize); <add> } <add> catch (IOException ex) { <add> return Flux.error(ex); <add> } <ide> } <ide> <ide> }
2
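The Spring change above removes the checked exception from the abstract `encode` hook; the resource encoder now catches the `IOException` itself and returns `Flux.error(ex)`, so failures travel through the returned stream instead of being thrown at the call site. The same trade-off sketched with Python generators (file reading here is only an illustration, not the Spring API):

def encode_eager(path, chunk_size=8192):
    # Old style: the open failure is raised right here, before any stream exists.
    fh = open(path, "rb")
    return iter(lambda: fh.read(chunk_size), b"")

def encode_lazy(path, chunk_size=8192):
    # New style: the failure is deferred into the returned stream and only
    # surfaces when the consumer starts iterating.
    def chunks():
        try:
            fh = open(path, "rb")
        except OSError as exc:
            raise RuntimeError(f"cannot read {path}") from exc
        with fh:
            yield from iter(lambda: fh.read(chunk_size), b"")
    return chunks()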
Go
Go
fix empty-lines (revive)
0c7b93095219e0856eaa50fedc5ec53bea243c76
<ide><path>daemon/cluster/convert/container.go <ide> func configReferencesToGRPC(sr []*types.ConfigReference) ([]*swarmapi.ConfigRefe <ide> func configReferencesFromGRPC(sr []*swarmapi.ConfigReference) []*types.ConfigReference { <ide> refs := make([]*types.ConfigReference, 0, len(sr)) <ide> for _, s := range sr { <del> <ide> r := &types.ConfigReference{ <ide> ConfigID: s.ConfigID, <ide> ConfigName: s.ConfigName, <ide><path>daemon/cluster/convert/network.go <ide> func endpointFromGRPC(e *swarmapi.Endpoint) types.Endpoint { <ide> NetworkID: v.NetworkID, <ide> Addr: v.Addr}) <ide> } <del> <ide> } <ide> <ide> return endpoint <ide><path>daemon/cluster/convert/service.go <ide> func serviceSpecFromGRPC(spec *swarmapi.ServiceSpec) (*types.ServiceSpec, error) <ide> for _, n := range spec.Networks { <ide> netConfig := types.NetworkAttachmentConfig{Target: n.Target, Aliases: n.Aliases, DriverOpts: n.DriverAttachmentOpts} <ide> serviceNetworks = append(serviceNetworks, netConfig) <del> <ide> } <ide> <ide> taskTemplate, err := taskSpecFromGRPC(spec.Task) <ide> func ServiceSpecToGRPC(s types.ServiceSpec) (swarmapi.ServiceSpec, error) { <ide> for _, n := range s.TaskTemplate.Networks { <ide> netConfig := &swarmapi.NetworkAttachmentConfig{Target: n.Target, Aliases: n.Aliases, DriverAttachmentOpts: n.DriverOpts} <ide> taskNetworks = append(taskNetworks, netConfig) <del> <ide> } <ide> <ide> spec := swarmapi.ServiceSpec{ <ide> func resourcesToGRPC(res *types.ResourceRequirements) *swarmapi.ResourceRequirem <ide> MemoryBytes: res.Reservations.MemoryBytes, <ide> Generic: GenericResourcesToGRPC(res.Reservations.GenericResources), <ide> } <del> <ide> } <ide> } <ide> return reqs <ide> func restartPolicyToGRPC(p *types.RestartPolicy) (*swarmapi.RestartPolicy, error <ide> } <ide> if p.MaxAttempts != nil { <ide> rp.MaxAttempts = *p.MaxAttempts <del> <ide> } <ide> } <ide> return rp, nil <ide><path>daemon/cluster/executor/container/adapter.go <ide> func (c *containerAdapter) createVolumes(ctx context.Context) error { <ide> // It returns an error if the driver name is different - that is a valid error <ide> return err <ide> } <del> <ide> } <ide> <ide> return nil <ide><path>daemon/cluster/executor/container/health_test.go <ide> import ( <ide> ) <ide> <ide> func TestHealthStates(t *testing.T) { <del> <ide> // set up environment: events, task, container .... <ide> e := events.New() <ide> _, l, _ := e.Subscribe() <ide><path>daemon/cluster/services.go <ide> func (c *Cluster) CreateService(s swarm.ServiceSpec, encodedAuth string, queryRe <ide> logrus.Warnf("unable to pin image %s to digest: %s", ctnr.Image, err.Error()) <ide> // warning in the client response should be concise <ide> resp.Warnings = append(resp.Warnings, digestWarning(ctnr.Image)) <del> <ide> } else if ctnr.Image != digestImage { <ide> logrus.Debugf("pinning image %s by digest: %s", ctnr.Image, digestImage) <ide> ctnr.Image = digestImage <del> <ide> } else { <ide> logrus.Debugf("creating service using supplied digest reference %s", ctnr.Image) <del> <ide> } <ide> <ide> // Replace the context with a fresh one. <ide> func (c *Cluster) UpdateService(serviceIDOrName string, version uint64, spec swa <ide> var resp *types.ServiceUpdateResponse <ide> <ide> err := c.lockedManagerAction(func(ctx context.Context, state nodeState) error { <del> <ide> err := c.populateNetworkID(ctx, state.controlClient, &spec) <ide> if err != nil { <ide> return err
6
Ruby
Ruby
convert x11requirement test to spec
26c01b4c43c5bf45799d81cd0e86ebca61a328c6
<ide><path>Library/Homebrew/test/os/mac/x11_requirement_test.rb <del>require "testing_env" <del>require "requirements/x11_requirement" <del> <del>class OSMacX11RequirementTests < Homebrew::TestCase <del> def test_satisfied <del> MacOS::XQuartz.stubs(:version).returns("2.7.5") <del> MacOS::XQuartz.stubs(:installed?).returns(true) <del> assert_predicate X11Requirement.new, :satisfied? <del> <del> MacOS::XQuartz.stubs(:installed?).returns(false) <del> refute_predicate X11Requirement.new, :satisfied? <del> end <del>end <ide><path>Library/Homebrew/test/x11_requirement_spec.rb <ide> subject.modify_build_environment <ide> end <ide> end <add> <add> describe "#satisfied?", :needs_macos do <add> it "returns true if X11 is installed" do <add> expect(MacOS::XQuartz).to receive(:version).and_return("2.7.5") <add> expect(MacOS::XQuartz).to receive(:installed?).and_return(true) <add> expect(subject).to be_satisfied <add> end <add> <add> it "returns false if X11 is not installed" do <add> expect(MacOS::XQuartz).to receive(:installed?).and_return(false) <add> expect(subject).not_to be_satisfied <add> end <add> end <ide> end
2
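The Homebrew patch above rewrites minitest-style stubs (`MacOS::XQuartz.stubs(:installed?)`) as RSpec message expectations inside a spec file. The underlying test pattern, stub the environment probe and then assert the requirement, looks like this in Python's unittest.mock (class and method names are invented for the sketch):

from unittest import mock

class X11Requirement:
    def satisfied(self, xquartz):
        return xquartz.installed() and xquartz.version() >= "2.7.5"

def test_satisfied_when_installed():
    xquartz = mock.Mock()
    xquartz.installed.return_value = True
    xquartz.version.return_value = "2.7.5"
    assert X11Requirement().satisfied(xquartz)

def test_not_satisfied_when_missing():
    xquartz = mock.Mock()
    xquartz.installed.return_value = False
    assert not X11Requirement().satisfied(xquartz)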
Ruby
Ruby
apply time column precision on assignment
4d9126cfccefdb69149caf7681d674b50335e9b4
<ide><path>activemodel/lib/active_model/type/time.rb <ide> def user_input_in_time_zone(value) <ide> private <ide> <ide> def cast_value(value) <del> return value unless value.is_a?(::String) <add> return apply_seconds_precision(value) unless value.is_a?(::String) <ide> return if value.empty? <ide> <ide> if value.start_with?("2000-01-01") <ide><path>activerecord/test/cases/date_time_precision_test.rb <ide> def test_datetime_data_type_with_precision <ide> assert_equal 5, Foo.columns_hash["updated_at"].precision <ide> end <ide> <add> def test_datetime_precision_is_truncated_on_assignment <add> @connection.create_table(:foos, force: true) <add> @connection.add_column :foos, :created_at, :datetime, precision: 0 <add> @connection.add_column :foos, :updated_at, :datetime, precision: 6 <add> <add> time = ::Time.now.change(nsec: 123456789) <add> foo = Foo.new(created_at: time, updated_at: time) <add> <add> assert_equal 0, foo.created_at.nsec <add> assert_equal 123456000, foo.updated_at.nsec <add> <add> foo.save! <add> foo.reload <add> <add> assert_equal 0, foo.created_at.nsec <add> assert_equal 123456000, foo.updated_at.nsec <add> end <add> <ide> def test_timestamps_helper_with_custom_precision <ide> @connection.create_table(:foos, force: true) do |t| <ide> t.timestamps precision: 4 <ide><path>activerecord/test/cases/time_precision_test.rb <ide> def test_time_data_type_with_precision <ide> assert_equal 6, Foo.columns_hash["finish"].precision <ide> end <ide> <add> def test_time_precision_is_truncated_on_assignment <add> @connection.create_table(:foos, force: true) <add> @connection.add_column :foos, :start, :time, precision: 0 <add> @connection.add_column :foos, :finish, :time, precision: 6 <add> <add> time = ::Time.now.change(nsec: 123456789) <add> foo = Foo.new(start: time, finish: time) <add> <add> assert_equal 0, foo.start.nsec <add> assert_equal 123456000, foo.finish.nsec <add> <add> foo.save! <add> foo.reload <add> <add> assert_equal 0, foo.start.nsec <add> assert_equal 123456000, foo.finish.nsec <add> end <add> <ide> def test_passing_precision_to_time_does_not_set_limit <ide> @connection.create_table(:foos, force: true) do |t| <ide> t.time :start, precision: 3
3
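The Rails fix above applies the column's declared precision when the attribute is assigned, not only when the row is written, so the in-memory value already matches what a reload from the database would return (123456789 ns becomes 0 for precision 0 and 123456000 for precision 6 in the new tests). A small Python sketch of that truncation, not the Active Model implementation:

from datetime import datetime

def apply_seconds_precision(value, precision):
    # Keep only `precision` fractional-second digits (datetime stores microseconds).
    if precision >= 6:
        return value
    step = 10 ** (6 - precision)
    return value.replace(microsecond=(value.microsecond // step) * step)

t = datetime(2000, 1, 1, 12, 0, 0, 123456)
print(apply_seconds_precision(t, 0).microsecond)  # 0
print(apply_seconds_precision(t, 3).microsecond)  # 123000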
Text
Text
fix typo in part-8-rtk-query-advanced.md
8685630b415ee7a81718032b72f648b7d3e7a741
<ide><path>docs/tutorials/essentials/part-8-rtk-query-advanced.md <ide> In this case, we'll call the field `postsForUser`, and we can destructure that n <ide> We've now seen three different ways that we can manage transforming responses: <ide> <ide> - Keep original response in cache, read full result in component and derive values <del>- Keep original response in cache, read derived result with `selectFromResponse` <add>- Keep original response in cache, read derived result with `selectFromResult` <ide> - Transform response before storing in cache <ide> <ide> Each of these approaches can be useful in different situations. Here's some suggestions for when you should consider using them: <ide> Let's take one last look at the whole application in action: <ide> - Endpoint objects include functions for initating requests, generating result selectors, and matching request action objects <ide> - **Responses can be transformed in different ways as needed** <ide> - Endpoints can define a `transformResponse` callback to modify the data before caching <del> - Hooks can be given a `selectFromResponse` option to extract/transform data <add> - Hooks can be given a `selectFromResult` option to extract/transform data <ide> - Components can read an entire value and transform with `useMemo` <ide> - **RTK Query has advanced options for manipulating cached data for better user experience** <ide> - The `onQueryStarted` lifecycle can be used for optimistic updates by updating cache immediately before a request returns
1
Python
Python
improve detr post-processing methods
01eb34ab45a8895fbd9e335568290e5d0f5f4491
<ide><path>src/transformers/models/conditional_detr/feature_extraction_conditional_detr.py <ide> <ide> import io <ide> import pathlib <add>import warnings <ide> from collections import defaultdict <ide> from typing import Dict, List, Optional, Union <ide> <ide> def __call__( <ide> if annotations is not None: <ide> annotations = [annotations] <ide> <del> # Create deep copies to avoid editing inputs in place <add> # Create a copy of the list to avoid editing it in place <ide> images = [image for image in images] <ide> <ide> if annotations is not None: <ide> def post_process_segmentation(self, outputs, target_sizes, threshold=0.9, mask_t <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, and masks for an image <ide> in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_segmentation` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_semantic_segmentation`.", <add> FutureWarning, <add> ) <ide> out_logits, raw_masks = outputs.logits, outputs.pred_masks <ide> preds = [] <ide> <ide> def post_process_instance(self, results, outputs, orig_target_sizes, max_target_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, boxes and masks for an <ide> image in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_instance` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_instance_segmentation`.", <add> FutureWarning, <add> ) <ide> <ide> if len(orig_target_sizes) != len(max_target_sizes): <ide> raise ValueError("Make sure to pass in as many orig_target_sizes as max_target_sizes") <ide> def post_process_panoptic(self, outputs, processed_sizes, target_sizes=None, is_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing a PNG string and segments_info values for <ide> an image in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_panoptic is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_panoptic_segmentation`.", <add> FutureWarning, <add> ) <ide> if target_sizes is None: <ide> target_sizes = processed_sizes <ide> if len(processed_sizes) != len(target_sizes): <ide><path>src/transformers/models/conditional_detr/modeling_conditional_detr.py <ide> class ConditionalDetrObjectDetectionOutput(ModelOutput): <ide> pred_boxes (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`): <ide> Normalized boxes coordinates for all queries, represented as (center_x, center_y, width, height). These <ide> values are normalized in [0, 1], relative to the size of each individual image in the batch (disregarding <del> possible padding). You can use [`~ConditionalDetrFeatureExtractor.post_process`] to retrieve the <del> unnormalized bounding boxes. <add> possible padding). You can use [`~ConditionalDetrFeatureExtractor.post_process_object_detection`] to <add> retrieve the unnormalized bounding boxes. <ide> auxiliary_outputs (`list[Dict]`, *optional*): <ide> Optional, only returned when auxilary losses are activated (i.e. `config.auxiliary_loss` is set to `True`) <ide> and labels are provided. It is a list of dictionaries containing the two above keys (`logits` and <ide> class ConditionalDetrSegmentationOutput(ModelOutput): <ide> pred_boxes (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`): <ide> Normalized boxes coordinates for all queries, represented as (center_x, center_y, width, height). 
These <ide> values are normalized in [0, 1], relative to the size of each individual image in the batch (disregarding <del> possible padding). You can use [`~ConditionalDetrFeatureExtractor.post_process`] to retrieve the <del> unnormalized bounding boxes. <add> possible padding). You can use [`~ConditionalDetrFeatureExtractor.post_process_object_detection`] to <add> retrieve the unnormalized bounding boxes. <ide> pred_masks (`torch.FloatTensor` of shape `(batch_size, num_queries, height/4, width/4)`): <ide> Segmentation masks logits for all queries. See also <del> [`~ConditionalDetrFeatureExtractor.post_process_segmentation`] or <del> [`~ConditionalDetrFeatureExtractor.post_process_panoptic`] to evaluate instance and panoptic segmentation <del> masks respectively. <add> [`~ConditionalDetrFeatureExtractor.post_process_semantic_segmentation`] or <add> [`~ConditionalDetrFeatureExtractor.post_process_instance_segmentation`] <add> [`~ConditionalDetrFeatureExtractor.post_process_panoptic_segmentation`] to evaluate semantic, instance and <add> panoptic segmentation masks respectively. <ide> auxiliary_outputs (`list[Dict]`, *optional*): <ide> Optional, only returned when auxiliary losses are activated (i.e. `config.auxiliary_loss` is set to `True`) <ide> and labels are provided. It is a list of dictionaries containing the two above keys (`logits` and <ide><path>src/transformers/models/deformable_detr/feature_extraction_deformable_detr.py <ide> <ide> import io <ide> import pathlib <add>import warnings <ide> from collections import defaultdict <ide> from typing import Dict, List, Optional, Union <ide> <ide> def __call__( <ide> if annotations is not None: <ide> annotations = [annotations] <ide> <del> # Create deep copies to avoid editing inputs in place <add> # Create a copy of the list to avoid editing it in place <ide> images = [image for image in images] <ide> <ide> if annotations is not None: <ide> def post_process_segmentation(self, outputs, target_sizes, threshold=0.9, mask_t <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, and masks for an image <ide> in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_segmentation` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_semantic_segmentation`.", <add> FutureWarning, <add> ) <ide> out_logits, raw_masks = outputs.logits, outputs.pred_masks <ide> preds = [] <ide> <ide> def post_process_instance(self, results, outputs, orig_target_sizes, max_target_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, boxes and masks for an <ide> image in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_instance` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_instance_segmentation`.", <add> FutureWarning, <add> ) <ide> <ide> if len(orig_target_sizes) != len(max_target_sizes): <ide> raise ValueError("Make sure to pass in as many orig_target_sizes as max_target_sizes") <ide> def post_process_panoptic(self, outputs, processed_sizes, target_sizes=None, is_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing a PNG string and segments_info values for <ide> an image in the batch as predicted by the model. 
<ide> """ <add> warnings.warn( <add> "`post_process_panoptic is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_panoptic_segmentation`.", <add> FutureWarning, <add> ) <ide> if target_sizes is None: <ide> target_sizes = processed_sizes <ide> if len(processed_sizes) != len(target_sizes): <ide><path>src/transformers/models/detr/feature_extraction_detr.py <ide> <ide> import io <ide> import pathlib <add>import warnings <ide> from collections import defaultdict <del>from typing import Dict, List, Optional, Union <add>from typing import Dict, List, Optional, Set, Tuple, Union <ide> <ide> import numpy as np <ide> from PIL import Image <ide> def id_to_rgb(id_map): <ide> return color <ide> <ide> <add>def binary_mask_to_rle(mask): <add> """ <add> Args: <add> Converts given binary mask of shape (height, width) to the run-length encoding (RLE) format. <add> mask (`torch.Tensor` or `numpy.array`): <add> A binary mask tensor of shape `(height, width)` where 0 denotes background and 1 denotes the target <add> segment_id or class_id. <add> Returns: <add> `List`: Run-length encoded list of the binary mask. Refer to COCO API for more information about the RLE <add> format. <add> """ <add> if is_torch_tensor(mask): <add> mask = mask.numpy() <add> <add> pixels = mask.flatten() <add> pixels = np.concatenate([[0], pixels, [0]]) <add> runs = np.where(pixels[1:] != pixels[:-1])[0] + 1 <add> runs[1::2] -= runs[::2] <add> return [x for x in runs] <add> <add> <add>def remove_low_and_no_objects(masks, scores, labels, object_mask_threshold, num_labels): <add> """ <add> Args: <add> Binarize the given masks using `object_mask_threshold`, it returns the associated values of `masks`, `scores` <add> and `labels`. <add> masks (`torch.Tensor`): <add> A tensor of shape `(num_queries, height, width)`. <add> scores (`torch.Tensor`): <add> A tensor of shape `(num_queries)`. <add> labels (`torch.Tensor`): <add> A tensor of shape `(num_queries)`. <add> object_mask_threshold (`float`): <add> A number between 0 and 1 used to binarize the masks. <add> Raises: <add> `ValueError`: Raised when the first dimension doesn't match in all input tensors. <add> Returns: <add> `Tuple[`torch.Tensor`, `torch.Tensor`, `torch.Tensor`]`: The `masks`, `scores` and `labels` without the region <add> < `object_mask_threshold`. <add> """ <add> if not (masks.shape[0] == scores.shape[0] == labels.shape[0]): <add> raise ValueError("mask, scores and labels must have the same shape!") <add> <add> to_keep = labels.ne(num_labels) & (scores > object_mask_threshold) <add> <add> return masks[to_keep], scores[to_keep], labels[to_keep] <add> <add> <ide> class DetrFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin): <ide> r""" <ide> Constructs a DETR feature extractor. <ide> def __call__( <ide> if annotations is not None: <ide> annotations = [annotations] <ide> <del> # Create deep copies to avoid editing inputs in place <add> # Create a copy of the list to avoid editing it in place <ide> images = [image for image in images] <ide> <ide> if annotations is not None: <ide> def post_process(self, outputs, target_sizes): <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels and boxes for an image <ide> in the batch as predicted by the model. 
<ide> """ <add> warnings.warn( <add> "`post_process` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_object_detection`", <add> FutureWarning, <add> ) <add> <ide> out_logits, out_bbox = outputs.logits, outputs.pred_boxes <ide> <ide> if len(out_logits) != len(target_sizes): <ide> def post_process(self, outputs, target_sizes): <ide> boxes = boxes * scale_fct[:, None, :] <ide> <ide> results = [{"scores": s, "labels": l, "boxes": b} for s, l, b in zip(scores, labels, boxes)] <del> <ide> return results <ide> <ide> def post_process_segmentation(self, outputs, target_sizes, threshold=0.9, mask_threshold=0.5): <ide> def post_process_segmentation(self, outputs, target_sizes, threshold=0.9, mask_t <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, and masks for an image <ide> in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_segmentation` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_semantic_segmentation`.", <add> FutureWarning, <add> ) <ide> out_logits, raw_masks = outputs.logits, outputs.pred_masks <ide> preds = [] <ide> <ide> def post_process_instance(self, results, outputs, orig_target_sizes, max_target_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, boxes and masks for an <ide> image in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_instance` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_instance_segmentation`.", <add> FutureWarning, <add> ) <ide> <ide> if len(orig_target_sizes) != len(max_target_sizes): <ide> raise ValueError("Make sure to pass in as many orig_target_sizes as max_target_sizes") <ide> def post_process_panoptic(self, outputs, processed_sizes, target_sizes=None, is_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing a PNG string and segments_info values for <ide> an image in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_panoptic is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_panoptic_segmentation`.", <add> FutureWarning, <add> ) <ide> if target_sizes is None: <ide> target_sizes = processed_sizes <ide> if len(processed_sizes) != len(target_sizes): <ide> def get_ids_area(masks, scores, dedup=False): <ide> predictions = {"png_string": out.getvalue(), "segments_info": segments_info} <ide> preds.append(predictions) <ide> return preds <add> <add> def post_process_object_detection( <add> self, outputs, threshold: float = 0.5, target_sizes: Union[TensorType, List[Tuple]] = None <add> ): <add> """ <add> Converts the output of [`DetrForObjectDetection`] into the format expected by the COCO api. Only supports <add> PyTorch. <add> <add> Args: <add> outputs ([`DetrObjectDetectionOutput`]): <add> Raw outputs of the model. <add> threshold (`float`, *optional*): <add> Score threshold to keep object detection predictions. <add> target_sizes (`torch.Tensor` or `List[Tuple[int, int]]`, *optional*, defaults to `None`): <add> Tensor of shape `(batch_size, 2)` or list of tuples (`Tuple[int, int]`) containing the target size <add> (height, width) of each image in the batch. If left to None, predictions will not be resized. <add> <add> Returns: <add> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels and boxes for an image <add> in the batch as predicted by the model. 
<add> """ <add> out_logits, out_bbox = outputs.logits, outputs.pred_boxes <add> <add> if target_sizes is not None: <add> if len(out_logits) != len(target_sizes): <add> raise ValueError( <add> "Make sure that you pass in as many target sizes as the batch dimension of the logits" <add> ) <add> <add> prob = nn.functional.softmax(out_logits, -1) <add> scores, labels = prob[..., :-1].max(-1) <add> <add> # Convert to [x0, y0, x1, y1] format <add> boxes = center_to_corners_format(out_bbox) <add> <add> # Convert from relative [0, 1] to absolute [0, height] coordinates <add> if target_sizes is not None: <add> if isinstance(target_sizes, List): <add> img_h = torch.Tensor([i[0] for i in target_sizes]) <add> img_w = torch.Tensor([i[1] for i in target_sizes]) <add> else: <add> img_h, img_w = target_sizes.unbind(1) <add> <add> scale_fct = torch.stack([img_w, img_h, img_w, img_h], dim=1) <add> boxes = boxes * scale_fct[:, None, :] <add> <add> results = [] <add> for s, l, b in zip(scores, labels, boxes): <add> score = s[s > threshold] <add> label = l[s > threshold] <add> box = b[s > threshold] <add> results.append({"scores": score, "labels": label, "boxes": box}) <add> <add> return results <add> <add> def post_process_semantic_segmentation(self, outputs, target_sizes: List[Tuple[int, int]] = None): <add> """ <add> Args: <add> Converts the output of [`DetrForSegmentation`] into semantic segmentation maps. Only supports PyTorch. <add> outputs ([`DetrForSegmentation`]): <add> Raw outputs of the model. <add> target_sizes (`List[Tuple[int, int]]`, *optional*, defaults to `None`): <add> A list of tuples (`Tuple[int, int]`) containing the target size (height, width) of each image in the <add> batch. If left to None, predictions will not be resized. <add> Returns: <add> `List[torch.Tensor]`: <add> A list of length `batch_size`, where each item is a semantic segmentation map of shape (height, width) <add> corresponding to the target_sizes entry (if `target_sizes` is specified). Each entry of each <add> `torch.Tensor` correspond to a semantic class id. 
<add> """ <add> class_queries_logits = outputs.logits # [batch_size, num_queries, num_classes+1] <add> masks_queries_logits = outputs.pred_masks # [batch_size, num_queries, height, width] <add> <add> # Remove the null class `[..., :-1]` <add> masks_classes = class_queries_logits.softmax(dim=-1)[..., :-1] <add> masks_probs = masks_queries_logits.sigmoid() # [batch_size, num_queries, height, width] <add> <add> # Semantic segmentation logits of shape (batch_size, num_classes, height, width) <add> segmentation = torch.einsum("bqc, bqhw -> bchw", masks_classes, masks_probs) <add> batch_size = class_queries_logits.shape[0] <add> <add> # Resize logits and compute semantic segmentation maps <add> if target_sizes is not None: <add> if batch_size != len(target_sizes): <add> raise ValueError( <add> "Make sure that you pass in as many target sizes as the batch dimension of the logits" <add> ) <add> <add> semantic_segmentation = [] <add> for idx in range(batch_size): <add> resized_logits = torch.nn.functional.interpolate( <add> segmentation[idx].unsqueeze(dim=0), size=target_sizes[idx], mode="bilinear", align_corners=False <add> ) <add> semantic_map = resized_logits[0].argmax(dim=0) <add> semantic_segmentation.append(semantic_map) <add> else: <add> semantic_segmentation = segmentation.argmax(dim=1) <add> semantic_segmentation = [semantic_segmentation[i] for i in range(semantic_segmentation.shape[0])] <add> <add> return semantic_segmentation <add> <add> def post_process_instance_segmentation( <add> self, <add> outputs, <add> threshold: float = 0.5, <add> overlap_mask_area_threshold: float = 0.8, <add> target_sizes: List[Tuple] = None, <add> return_coco_annotation: Optional[bool] = False, <add> ): <add> """ <add> Args: <add> Converts the output of [`DetrForSegmentation`] into instance segmentation predictions. Only supports PyTorch. <add> outputs ([`DetrForSegmentation`]): <add> Raw outputs of the model. <add> threshold (`float`, *optional*): <add> The probability score threshold to keep predicted instance masks, defaults to 0.5. <add> overlap_mask_area_threshold (`float`, *optional*): <add> The overlap mask area threshold to merge or discard small disconnected parts within each binary <add> instance mask, defaults to 0.8. <add> target_sizes (`List[Tuple]`, *optional*, defaults to `None`): <add> List of length (batch_size), where each list item (`Tuple[int, int]]`) corresponds to the requested <add> final size (height, width) of each prediction. If left to None, predictions will not be resized. <add> return_coco_annotation (`bool`, *optional*, defaults to `False`): <add> If set to `True`, segmentation maps are returned in COCO run-length encoding (RLE) format. <add> Returns: <add> `List[Dict]`: A list of dictionaries, one per image, each dictionary containing two keys: <add> - **segmentation** -- A tensor of shape `(height, width)` where each pixel represents a `segment_id` or <add> `List[List]` run-length encoding (RLE) of the segmentation map if return_coco_format is set to `True`. <add> - **segment_ids** -- A dictionary that maps segment ids to semantic class ids. <add> - **id** -- An integer representing the `segment_id`. <add> - **label_id** -- An integer representing the segment's label / semantic class id. 
<add> """ <add> class_queries_logits = outputs.logits # [batch_size, num_queries, num_classes+1] <add> masks_queries_logits = outputs.pred_masks # [batch_size, num_queries, height, width] <add> <add> batch_size = class_queries_logits.shape[0] <add> num_labels = class_queries_logits.shape[-1] - 1 <add> <add> mask_probs = masks_queries_logits.sigmoid() # [batch_size, num_queries, height, width] <add> <add> # Predicted label and score of each query (batch_size, num_queries) <add> pred_scores, pred_labels = nn.functional.softmax(class_queries_logits, dim=-1).max(-1) <add> <add> # Loop over items in batch size <add> results: List[Dict[str, TensorType]] = [] <add> <add> for i in range(batch_size): <add> mask_probs_item, pred_scores_item, pred_labels_item = remove_low_and_no_objects( <add> mask_probs[i], pred_scores[i], pred_labels[i], threshold, num_labels <add> ) <add> <add> height, width = target_sizes[i][0], target_sizes[i][1] <add> segmentation = torch.zeros((height, width), dtype=torch.int32, device=mask_probs_item.device) <add> segments: List[Dict] = [] <add> <add> object_detected = mask_probs_item.shape[0] > 0 <add> <add> if object_detected: <add> # Resize mask to corresponding target_size <add> if target_sizes is not None: <add> mask_probs_item = torch.nn.functional.interpolate( <add> mask_probs_item.unsqueeze(0), <add> size=target_sizes[i], <add> mode="bilinear", <add> align_corners=False, <add> )[0] <add> <add> current_segment_id = 0 <add> <add> # Weigh each mask by its prediction score <add> mask_probs_item *= pred_scores_item.view(-1, 1, 1) <add> mask_labels_item = mask_probs_item.argmax(0) # [height, width] <add> <add> # Keep track of instances of each class <add> stuff_memory_list: Dict[str, int] = {} <add> for k in range(pred_labels_item.shape[0]): <add> # Get the mask associated with the k class <add> pred_class = pred_labels_item[k].item() <add> mask_k = mask_labels_item == k <add> mask_k_area = mask_k.sum() <add> <add> # Compute the area of all the stuff in query k <add> original_area = (mask_probs_item[k] >= 0.5).sum() <add> mask_exists = mask_k_area > 0 and original_area > 0 <add> <add> if mask_exists: <add> # Eliminate segments with mask area below threshold <add> area_ratio = mask_k_area / original_area <add> if not area_ratio.item() > overlap_mask_area_threshold: <add> continue <add> <add> # Add corresponding class id <add> if pred_class in stuff_memory_list: <add> current_segment_id = stuff_memory_list[pred_class] <add> else: <add> current_segment_id += 1 <add> <add> # Add current object segment to final segmentation map <add> segmentation[mask_k] = current_segment_id <add> segments.append( <add> { <add> "id": current_segment_id, <add> "label_id": pred_class, <add> } <add> ) <add> else: <add> segmentation -= 1 <add> <add> # Return segmentation map in run-length encoding (RLE) format <add> if return_coco_annotation: <add> segment_ids = torch.unique(segmentation) <add> <add> run_length_encodings = [] <add> for idx in segment_ids: <add> mask = torch.where(segmentation == idx, 1, 0) <add> rle = binary_mask_to_rle(mask) <add> run_length_encodings.append(rle) <add> <add> segmentation = run_length_encodings <add> <add> results.append({"segmentation": segmentation, "segment_ids": segments}) <add> return results <add> <add> def post_process_panoptic_segmentation( <add> self, <add> outputs, <add> threshold: float = 0.5, <add> overlap_mask_area_threshold: float = 0.8, <add> label_ids_to_fuse: Optional[Set[int]] = None, <add> target_sizes: List[Tuple] = None, <add> ) -> List[Dict]: 
<add> """ <add> Args: <add> Converts the output of [`DetrForSegmentation`] into image panoptic segmentation predictions. Only supports <add> PyTorch. <add> outputs ([`DetrForSegmentation`]): <add> The outputs from [`DetrForSegmentation`]. <add> threshold (`float`, *optional*, defaults to 0.5): <add> The probability score threshold to keep predicted instance masks. <add> overlap_mask_area_threshold (`float`, *optional*, defaults to 0.8): <add> The overlap mask area threshold to merge or discard small disconnected parts within each binary <add> instance mask. <add> label_ids_to_fuse (`Set[int]`, *optional*, defaults to `None`): <add> The labels in this state will have all their instances be fused together. For instance we could say <add> there can only be one sky in an image, but several persons, so the label ID for sky would be in that <add> set, but not the one for person. <add> target_sizes (`List[Tuple]`, *optional*): <add> List of length (batch_size), where each list item (`Tuple[int, int]]`) corresponds to the requested <add> final size (height, width) of each prediction in batch. If left to None, predictions will not be <add> resized. <add> Returns: <add> `List[Dict]`: A list of dictionaries, one per image, each dictionary containing two keys: <add> - **segmentation** -- a tensor of shape `(height, width)` where each pixel represents a `segment_id`. If <add> `target_sizes` is specified, segmentation is resized to the corresponding `target_sizes` entry. <add> - **segment_ids** -- A dictionary that maps segment ids to semantic class ids. <add> - **id** -- An integer representing the `segment_id`. <add> - **label_id** -- An integer representing the segment's label / semantic class id. <add> - **was_fused** -- a boolean, `True` if `label_id` was in `label_ids_to_fuse`, `False` otherwise. <add> Multiple instances of the same class / label were fused and assigned a single `segment_id`. <add> """ <add> <add> if label_ids_to_fuse is None: <add> warnings.warn("`label_ids_to_fuse` unset. 
No instance will be fused.") <add> label_ids_to_fuse = set() <add> <add> class_queries_logits = outputs.logits # [batch_size, num_queries, num_classes+1] <add> masks_queries_logits = outputs.pred_masks # [batch_size, num_queries, height, width] <add> <add> batch_size = class_queries_logits.shape[0] <add> num_labels = class_queries_logits.shape[-1] - 1 <add> <add> mask_probs = masks_queries_logits.sigmoid() # [batch_size, num_queries, height, width] <add> <add> # Predicted label and score of each query (batch_size, num_queries) <add> pred_scores, pred_labels = nn.functional.softmax(class_queries_logits, dim=-1).max(-1) <add> <add> # Loop over items in batch size <add> results: List[Dict[str, TensorType]] = [] <add> <add> for i in range(batch_size): <add> mask_probs_item, pred_scores_item, pred_labels_item = remove_low_and_no_objects( <add> mask_probs[i], pred_scores[i], pred_labels[i], threshold, num_labels <add> ) <add> <add> height, width = target_sizes[i][0], target_sizes[i][1] <add> segmentation = torch.zeros((height, width), dtype=torch.int32, device=mask_probs_item.device) <add> segments: List[Dict] = [] <add> <add> object_detected = mask_probs_item.shape[0] > 0 <add> <add> if object_detected: <add> # Resize mask to corresponding target_size <add> if target_sizes is not None: <add> mask_probs_item = torch.nn.functional.interpolate( <add> mask_probs_item.unsqueeze(0), <add> size=target_sizes[i], <add> mode="bilinear", <add> align_corners=False, <add> )[0] <add> <add> current_segment_id = 0 <add> <add> # Weigh each mask by its prediction score <add> mask_probs_item *= pred_scores_item.view(-1, 1, 1) <add> mask_labels_item = mask_probs_item.argmax(0) # [height, width] <add> <add> # Keep track of instances of each class <add> stuff_memory_list: Dict[str, int] = {} <add> for k in range(pred_labels_item.shape[0]): <add> pred_class = pred_labels_item[k].item() <add> should_fuse = pred_class in label_ids_to_fuse <add> <add> # Get the mask associated with the k class <add> mask_k = mask_labels_item == k <add> mask_k_area = mask_k.sum() <add> <add> # Compute the area of all the stuff in query k <add> original_area = (mask_probs_item[k] >= 0.5).sum() <add> mask_exists = mask_k_area > 0 and original_area > 0 <add> <add> if mask_exists: <add> # Eliminate disconnected tiny segments <add> area_ratio = mask_k_area / original_area <add> if not area_ratio.item() > overlap_mask_area_threshold: <add> continue <add> <add> # Add corresponding class id <add> if pred_class in stuff_memory_list: <add> current_segment_id = stuff_memory_list[pred_class] <add> else: <add> current_segment_id += 1 <add> <add> # Add current object segment to final segmentation map <add> segmentation[mask_k] = current_segment_id <add> segments.append( <add> { <add> "id": current_segment_id, <add> "label_id": pred_class, <add> "was_fused": should_fuse, <add> } <add> ) <add> if should_fuse: <add> stuff_memory_list[pred_class] = current_segment_id <add> else: <add> segmentation -= 1 <add> <add> results.append({"segmentation": segmentation, "segment_ids": segments}) <add> return results <ide><path>src/transformers/models/detr/modeling_detr.py <ide> class DetrObjectDetectionOutput(ModelOutput): <ide> pred_boxes (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`): <ide> Normalized boxes coordinates for all queries, represented as (center_x, center_y, width, height). These <ide> values are normalized in [0, 1], relative to the size of each individual image in the batch (disregarding <del> possible padding). 
You can use [`~DetrFeatureExtractor.post_process`] to retrieve the unnormalized bounding <del> boxes. <add> possible padding). You can use [`~DetrFeatureExtractor.post_process_object_detection`] to retrieve the <add> unnormalized bounding boxes. <ide> auxiliary_outputs (`list[Dict]`, *optional*): <ide> Optional, only returned when auxilary losses are activated (i.e. `config.auxiliary_loss` is set to `True`) <ide> and labels are provided. It is a list of dictionaries containing the two above keys (`logits` and <ide> class DetrSegmentationOutput(ModelOutput): <ide> pred_boxes (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`): <ide> Normalized boxes coordinates for all queries, represented as (center_x, center_y, width, height). These <ide> values are normalized in [0, 1], relative to the size of each individual image in the batch (disregarding <del> possible padding). You can use [`~DetrFeatureExtractor.post_process`] to retrieve the unnormalized bounding <del> boxes. <add> possible padding). You can use [`~DetrFeatureExtractor.post_process_object_detection`] to retrieve the <add> unnormalized bounding boxes. <ide> pred_masks (`torch.FloatTensor` of shape `(batch_size, num_queries, height/4, width/4)`): <del> Segmentation masks logits for all queries. See also [`~DetrFeatureExtractor.post_process_segmentation`] or <del> [`~DetrFeatureExtractor.post_process_panoptic`] to evaluate instance and panoptic segmentation masks <del> respectively. <add> Segmentation masks logits for all queries. See also <add> [`~DetrFeatureExtractor.post_process_semantic_segmentation`] or <add> [`~DetrFeatureExtractor.post_process_instance_segmentation`] <add> [`~DetrFeatureExtractor.post_process_panoptic_segmentation`] to evaluate semantic, instance and panoptic <add> segmentation masks respectively. <ide> auxiliary_outputs (`list[Dict]`, *optional*): <ide> Optional, only returned when auxiliary losses are activated (i.e. `config.auxiliary_loss` is set to `True`) <ide> and labels are provided. It is a list of dictionaries containing the two above keys (`logits` and <ide> def forward( <ide> <ide> >>> # convert outputs (bounding boxes and class logits) to COCO API <ide> >>> target_sizes = torch.tensor([image.size[::-1]]) <del> >>> results = feature_extractor.post_process(outputs, target_sizes=target_sizes)[0] <add> >>> results = feature_extractor.post_process_object_detection(outputs, target_sizes=target_sizes)[0] <ide> <ide> >>> for score, label, box in zip(results["scores"], results["labels"], results["boxes"]): <ide> ... 
box = [round(i, 2) for i in box.tolist()] <ide> def forward( <ide> >>> # forward pass <ide> >>> outputs = model(**inputs) <ide> <del> >>> # use the `post_process_panoptic` method of `DetrFeatureExtractor` to convert to COCO format <del> >>> processed_sizes = torch.as_tensor(inputs["pixel_values"].shape[-2:]).unsqueeze(0) <del> >>> result = feature_extractor.post_process_panoptic(outputs, processed_sizes)[0] <del> <del> >>> # the segmentation is stored in a special-format png <del> >>> panoptic_seg = Image.open(io.BytesIO(result["png_string"])) <del> >>> panoptic_seg = numpy.array(panoptic_seg, dtype=numpy.uint8) <del> >>> # retrieve the ids corresponding to each mask <del> >>> panoptic_seg_id = rgb_to_id(panoptic_seg) <del> >>> panoptic_seg_id.shape <del> (800, 1066) <add> >>> # Use the `post_process_panoptic_segmentation` method of `DetrFeatureExtractor` to retrieve post-processed panoptic segmentation maps <add> >>> # Segmentation results are returned as a list of dictionaries <add> >>> result = feature_extractor.post_process_panoptic_segmentation(outputs, processed_sizes) <add> <add> >>> # A tensor of shape (height, width) where each value denotes a segment id <add> >>> panoptic_seg = result[0]["segmentation"] <add> >>> # Get mapping of segment ids to semantic class ids <add> >>> panoptic_segments_info = result[0]["segment_ids"] <ide> ```""" <ide> <ide> return_dict = return_dict if return_dict is not None else self.config.use_return_dict <ide><path>src/transformers/models/yolos/feature_extraction_yolos.py <ide> <ide> import io <ide> import pathlib <add>import warnings <ide> from collections import defaultdict <ide> from typing import Dict, List, Optional, Union <ide> <ide> def post_process(self, outputs, target_sizes): <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels and boxes for an image <ide> in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_object_detection`", <add> FutureWarning, <add> ) <add> <ide> out_logits, out_bbox = outputs.logits, outputs.pred_boxes <ide> <ide> if len(out_logits) != len(target_sizes): <ide> def post_process(self, outputs, target_sizes): <ide> boxes = boxes * scale_fct[:, None, :] <ide> <ide> results = [{"scores": s, "labels": l, "boxes": b} for s, l, b in zip(scores, labels, boxes)] <del> <ide> return results <ide> <ide> # Copied from transformers.models.detr.feature_extraction_detr.DetrFeatureExtractor.post_process_segmentation <ide> def post_process_segmentation(self, outputs, target_sizes, threshold=0.9, mask_t <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, and masks for an image <ide> in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_segmentation` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_semantic_segmentation`.", <add> FutureWarning, <add> ) <ide> out_logits, raw_masks = outputs.logits, outputs.pred_masks <ide> preds = [] <ide> <ide> def post_process_instance(self, results, outputs, orig_target_sizes, max_target_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing the scores, labels, boxes and masks for an <ide> image in the batch as predicted by the model. 
<ide> """ <add> warnings.warn( <add> "`post_process_instance` is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_instance_segmentation`.", <add> FutureWarning, <add> ) <ide> <ide> if len(orig_target_sizes) != len(max_target_sizes): <ide> raise ValueError("Make sure to pass in as many orig_target_sizes as max_target_sizes") <ide> def post_process_panoptic(self, outputs, processed_sizes, target_sizes=None, is_ <ide> `List[Dict]`: A list of dictionaries, each dictionary containing a PNG string and segments_info values for <ide> an image in the batch as predicted by the model. <ide> """ <add> warnings.warn( <add> "`post_process_panoptic is deprecated and will be removed in v5 of Transformers, please use" <add> " `post_process_panoptic_segmentation`.", <add> FutureWarning, <add> ) <ide> if target_sizes is None: <ide> target_sizes = processed_sizes <ide> if len(processed_sizes) != len(target_sizes):
6
Ruby
Ruby
remove buggy and unnecessary logic
4e35dc0ebec33905a05dca1fdeecc6a39e87e9cc
<ide><path>railties/lib/rails/generators/app_base.rb <ide> def assets_gemfile_entry <ide> GEMFILE <ide> end <ide> <del> if options[:skip_javascript] <del> gemfile += <<-GEMFILE.gsub(/^ {12}/, '') <del> #{coffee_gemfile_entry} <del> #{javascript_runtime_gemfile_entry(2)} <del> GEMFILE <del> end <del> <ide> gemfile.strip_heredoc.gsub(/^[ \t]*$/, '') <ide> end <ide>
1
PHP
PHP
fix issue with find(count) and translatebehavior
55e1619c59e5b01fcb540de468bce3dd3884d170
<ide><path>lib/Cake/Model/Behavior/TranslateBehavior.php <ide> public function afterFind(Model $Model, $results, $primary = false) { <ide> } <ide> } else { <ide> $value = ''; <del> if (is_numeric($row[$Model->alias][$aliasVirtual]) || !empty($row[$Model->alias][$aliasVirtual])) { <add> if (isset($row[$Model->alias][$aliasVirtual])) { <ide> $value = $row[$Model->alias][$aliasVirtual]; <ide> } <ide> $row[$Model->alias][$aliasField] = $value; <ide><path>lib/Cake/Test/Case/Model/Behavior/TranslateBehaviorTest.php <ide> public function testLocaleSingleCountWithConditions() { <ide> <ide> $TestModel = new TranslatedItem(); <ide> $TestModel->locale = 'eng'; <del> $result = $TestModel->find('all', array('conditions' => array('slug' => 'first_translated'))); <add> $result = $TestModel->find('all', array( <add> 'conditions' => array('slug' => 'first_translated') <add> )); <ide> $expected = array( <ide> array( <ide> 'TranslatedItem' => array( <ide> public function testLocaleSingleCountWithConditions() { <ide> ); <ide> $this->assertEquals($expected, $result); <ide> <del> $result = $TestModel->find('count', array('conditions' => "TranslatedItem.slug = 'first_translated'")); <add> $result = $TestModel->find('count', array( <add> 'conditions' => array('slug' => 'first_translated') <add> )); <ide> $expected = 1; <ide> $this->assertEquals($expected, $result); <del> } <add> } <ide> <ide> /** <ide> * testLocaleSingleAssociations method
2
Javascript
Javascript
improve error messages in test-npm-install
6ca1bdfeb158aa8a4b981b715278b30c4a019ff0
<ide><path>test/parallel/test-npm-install.js <ide> const proc = spawn(process.execPath, args, { <ide> }); <ide> <ide> function handleExit(code, signalCode) { <del> assert.strictEqual(code, 0, 'npm install should run without an error'); <del> assert.ok(signalCode === null, 'signalCode should be null'); <add> assert.strictEqual(code, 0, `npm install got error code ${code}`); <add> assert.strictEqual(signalCode, null, `unexpected signal: ${signalCode}`); <ide> assert.doesNotThrow(function() { <ide> fs.accessSync(installDir + '/node_modules/package-name'); <ide> });
1
Ruby
Ruby
use #grep where it will suffice
62a0b3f18dc8c52cce89943c82d1a232552e2493
<ide><path>Library/Homebrew/cmd/audit.rb <ide> def audit_urls <ide> urls = [(f.stable.url rescue nil), (f.devel.url rescue nil), (f.head.url rescue nil)].compact <ide> <ide> # Check GNU urls; doesn't apply to mirrors <del> urls.select { |u| u =~ %r[^(https?|ftp)://(?!alpha).+/gnu/] }.each do |u| <add> urls.grep(%r[^(https?|ftp)://(?!alpha).+/gnu/]).each do |u| <ide> problem "\"ftpmirror.gnu.org\" is preferred for GNU software (url is #{u})." <ide> end <ide> <ide> def audit_urls <ide> end <ide> <ide> # Check for git:// GitHub repo urls, https:// is preferred. <del> urls.select { |u| u =~ %r[^git://([^/])*github\.com/] }.each do |u| <add> urls.grep(%r[^git://([^/])*github\.com/]).each do |u| <ide> problem "Use https:// URLs for accessing GitHub repositories (url is #{u})." <ide> end <ide> <ide> # Check for http:// GitHub repo urls, https:// is preferred. <del> urls.select { |u| u =~ %r[^http://github\.com/.*\.git$] }.each do |u| <add> urls.grep(%r[^http://github\.com/.*\.git$]).each do |u| <ide> problem "Use https:// URLs for accessing GitHub repositories (url is #{u})." <ide> end <ide> <ide><path>Library/Homebrew/cmd/doctor.rb <ide> def doctor <ide> checks = Checks.new <ide> <ide> if ARGV.include? '--list-checks' <del> checks.methods.select { |m| m =~ /^check_/ }.sort.each { |m| puts m } <add> checks.methods.grep(/^check_/).sort.each { |m| puts m } <ide> exit <ide> end <ide> <ide> def doctor <ide> checks.methods.sort << "check_for_linked_keg_only_brews" << "check_for_outdated_homebrew" <ide> else <ide> ARGV.named <del> end.select{ |method| method =~ /^check_/ }.reverse.uniq.reverse <add> end.grep(/^check_/).reverse.uniq.reverse <ide> <ide> first_warning = true <ide> methods.each do |method|
2
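The patch above swaps select { |u| u =~ regex } for Enumerable#grep, which filters elements with the === operator; for a Regexp pattern that is the same match test, only shorter. A minimal sketch of the equivalence, using made-up URLs rather than anything from the Homebrew audit code:

urls = [
  "git://github.com/example/repo",
  "https://ftpmirror.gnu.org/hello/hello-2.10.tar.gz",
  "https://example.com/pkg.tar.gz",
]

# Enumerable#grep keeps elements for which pattern === element is true,
# so a Regexp pattern behaves exactly like the =~ test in the select form.
with_select = urls.select { |u| u =~ %r{^git://} }
with_grep   = urls.grep(%r{^git://})

p with_select == with_grep  # => true
p with_grep                 # => ["git://github.com/example/repo"]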
Ruby
Ruby
use topological sort to upgrade formulae
0c3e49092c2e807092d157f3723d712a807e37b1
<ide><path>Library/Homebrew/cask.rb <ide> require "cask/pkg" <ide> require "cask/quarantine" <ide> require "cask/staged" <del>require "cask/topological_hash" <ide> require "cask/url" <ide> require "cask/utils" <ide><path>Library/Homebrew/cask/installer.rb <ide> <ide> require "formula_installer" <ide> require "unpack_strategy" <add>require "utils/topological_hash" <ide> <del>require "cask/topological_hash" <ide> require "cask/config" <ide> require "cask/download" <ide> require "cask/staged" <ide> def arch_dependencies <ide> "but you are running #{@current_arch}." <ide> end <ide> <del> def graph_dependencies(cask_or_formula, acc = TopologicalHash.new) <del> return acc if acc.key?(cask_or_formula) <del> <del> if cask_or_formula.is_a?(Cask) <del> formula_deps = cask_or_formula.depends_on.formula.map { |f| Formula[f] } <del> cask_deps = cask_or_formula.depends_on.cask.map { |c| CaskLoader.load(c, config: nil) } <del> else <del> formula_deps = cask_or_formula.deps.reject(&:build?).map(&:to_formula) <del> cask_deps = cask_or_formula.requirements.map(&:cask).compact <del> .map { |c| CaskLoader.load(c, config: nil) } <del> end <del> <del> acc[cask_or_formula] ||= [] <del> acc[cask_or_formula] += formula_deps <del> acc[cask_or_formula] += cask_deps <del> <del> formula_deps.each do |f| <del> graph_dependencies(f, acc) <del> end <del> <del> cask_deps.each do |c| <del> graph_dependencies(c, acc) <del> end <del> <del> acc <del> end <del> <ide> def collect_cask_and_formula_dependencies <ide> return @cask_and_formula_dependencies if @cask_and_formula_dependencies <ide> <del> graph = graph_dependencies(@cask) <add> graph = ::Utils::TopologicalHash.graph_package_dependencies(@cask) <ide> <ide> raise CaskSelfReferencingDependencyError, cask.token if graph[@cask].include?(@cask) <ide> <del> primary_container.dependencies.each do |dep| <del> graph_dependencies(dep, graph) <del> end <add> ::Utils::TopologicalHash.graph_package_dependencies(primary_container.dependencies, graph) <ide> <ide> begin <ide> @cask_and_formula_dependencies = graph.tsort - [@cask] <ide><path>Library/Homebrew/cask/topological_hash.rb <del># typed: true <del># frozen_string_literal: true <del> <del>require "tsort" <del> <del>module Cask <del> # Topologically sortable hash map. 
<del> class TopologicalHash < Hash <del> include TSort <del> <del> private <del> <del> def tsort_each_node(&block) <del> each_key(&block) <del> end <del> <del> def tsort_each_child(node, &block) <del> fetch(node).each(&block) <del> end <del> end <del>end <ide><path>Library/Homebrew/test/utils/topological_hash_spec.rb <add># typed: false <add># frozen_string_literal: true <add> <add>require "utils/topological_hash" <add> <add>describe Utils::TopologicalHash do <add> describe "#tsort" do <add> it "returns a topologically sorted array" do <add> hash = described_class.new <add> hash[1] = [2, 3] <add> hash[2] = [3] <add> hash[3] = [] <add> hash[4] = [] <add> expect(hash.tsort).to eq [3, 2, 1, 4] <add> end <add> end <add> <add> describe "#strongly_connected_components" do <add> it "returns an array of arrays" do <add> hash = described_class.new <add> hash[1] = [2] <add> hash[2] = [3, 4] <add> hash[3] = [2] <add> hash[4] = [] <add> expect(hash.strongly_connected_components).to eq [[4], [2, 3], [1]] <add> end <add> end <add> <add> describe "::graph_package_dependencies" do <add> it "returns a topological hash" do <add> formula1 = formula "homebrew-test-formula1" do <add> url "foo" <add> version "0.5" <add> end <add> <add> formula2 = formula "homebrew-test-formula2" do <add> url "foo" <add> version "0.5" <add> depends_on "homebrew-test-formula1" <add> end <add> <add> formula3 = formula "homebrew-test-formula3" do <add> url "foo" <add> version "0.5" <add> depends_on "homebrew-test-formula4" <add> end <add> <add> formula4 = formula "homebrew-test-formula4" do <add> url "foo" <add> version "0.5" <add> depends_on "homebrew-test-formula3" <add> end <add> <add> cask1 = Cask::Cask.new("homebrew-test-cask1") do <add> url "foo" <add> version "1.2.3" <add> end <add> <add> cask2 = Cask::Cask.new("homebrew-test-cask2") do <add> url "foo" <add> version "1.2.3" <add> depends_on cask: "homebrew-test-cask1" <add> depends_on formula: "homebrew-test-formula1" <add> end <add> <add> cask3 = Cask::Cask.new("homebrew-test-cask3") do <add> url "foo" <add> version "1.2.3" <add> depends_on cask: "homebrew-test-cask2" <add> end <add> <add> stub_formula_loader formula1 <add> stub_formula_loader formula2 <add> stub_formula_loader formula3 <add> stub_formula_loader formula4 <add> <add> stub_cask_loader cask1 <add> stub_cask_loader cask2 <add> stub_cask_loader cask3 <add> <add> packages = [formula1, formula2, formula3, formula4, cask1, cask2, cask3] <add> expect(described_class.graph_package_dependencies(packages)).to eq({ <add> formula1 => [], <add> formula2 => [formula1], <add> formula3 => [formula4], <add> formula4 => [formula3], <add> cask1 => [], <add> cask2 => [formula1, cask1], <add> cask3 => [cask2], <add> }) <add> <add> sorted = [formula1, cask1, cask2, cask3, formula2] <add> expect(described_class.graph_package_dependencies([cask3, cask2, cask1, formula2, formula1]).tsort).to eq sorted <add> expect(described_class.graph_package_dependencies([cask3, formula2]).tsort).to eq sorted <add> <add> expect { described_class.graph_package_dependencies([formula3, formula4]).tsort }.to raise_error TSort::Cyclic <add> end <add> end <add>end <ide><path>Library/Homebrew/upgrade.rb <ide> require "development_tools" <ide> require "messages" <ide> require "cleanup" <add>require "utils/topological_hash" <ide> <ide> module Homebrew <ide> # Helper functions for upgrading formulae. 
<ide> def upgrade_formulae( <ide> end <ide> end <ide> <add> dependency_graph = Utils::TopologicalHash.graph_package_dependencies(formulae_to_install) <add> begin <add> formulae_to_install = dependency_graph.tsort & formulae_to_install <add> rescue TSort::Cyclic <add> # Failed to sort formulae topologically because there are cyclic <add> # dependencies. Let FormulaInstaller handle it. <add> end <add> <ide> formula_installers = formulae_to_install.map do |formula| <ide> Migrator.migrate_if_needed(formula, force: force, dry_run: dry_run) <ide> begin <ide><path>Library/Homebrew/utils/topological_hash.rb <add># typed: true <add># frozen_string_literal: true <add> <add>require "tsort" <add> <add>module Utils <add> # Topologically sortable hash map. <add> class TopologicalHash < Hash <add> extend T::Sig <add> <add> include TSort <add> <add> sig { <add> params( <add> packages: T.any(Cask::Cask, Formula, T::Array[T.any(Cask::Cask, Formula)]), <add> accumulator: TopologicalHash, <add> ).returns(TopologicalHash) <add> } <add> def self.graph_package_dependencies(packages, accumulator = TopologicalHash.new) <add> packages = Array(packages) <add> <add> packages.each do |cask_or_formula| <add> next accumulator if accumulator.key?(cask_or_formula) <add> <add> if cask_or_formula.is_a?(Cask::Cask) <add> formula_deps = cask_or_formula.depends_on <add> .formula <add> .map { |f| Formula[f] } <add> cask_deps = cask_or_formula.depends_on <add> .cask <add> .map { |c| Cask::CaskLoader.load(c, config: nil) } <add> else <add> formula_deps = cask_or_formula.deps <add> .reject(&:build?) <add> .map(&:to_formula) <add> cask_deps = cask_or_formula.requirements <add> .map(&:cask) <add> .compact <add> .map { |c| Cask::CaskLoader.load(c, config: nil) } <add> end <add> <add> accumulator[cask_or_formula] ||= [] <add> accumulator[cask_or_formula] += formula_deps <add> accumulator[cask_or_formula] += cask_deps <add> <add> graph_package_dependencies(formula_deps, accumulator) <add> graph_package_dependencies(cask_deps, accumulator) <add> end <add> <add> accumulator <add> end <add> <add> private <add> <add> def tsort_each_node(&block) <add> each_key(&block) <add> end <add> <add> def tsort_each_child(node, &block) <add> fetch(node).each(&block) <add> end <add> end <add>end
6
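The upgrade patch above orders formulae by mixing Ruby's standard TSort module into a Hash subclass. A minimal, self-contained sketch of that pattern follows; the package names are illustrative, not real Homebrew formulae.

require "tsort"

# Hash-backed topological sort: keys are packages, values are their dependencies.
class DependencyHash < Hash
  include TSort

  alias tsort_each_node each_key

  def tsort_each_child(node, &block)
    fetch(node, []).each(&block)
  end
end

graph = DependencyHash.new
graph["app"]  = ["libb", "liba"]
graph["libb"] = ["liba"]
graph["liba"] = []

p graph.tsort  # => ["liba", "libb", "app"] -- dependencies before dependents

# A cycle raises TSort::Cyclic, which the upgrade code rescues so the
# installer can fall back to the original ordering.
graph["liba"] = ["app"]
begin
  graph.tsort
rescue TSort::Cyclic
  puts "cycle detected; keep the original order"
end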
Ruby
Ruby
extract array to a constant
d46cf353d6cd9aaaa69c72599bef3ea43977a610
<ide><path>activesupport/lib/active_support/callbacks.rb <ide> module Callbacks <ide> extend ActiveSupport::DescendantsTracker <ide> end <ide> <add> CALLBACK_FILTER_TYPES = [:before, :after, :around] <add> <ide> # Runs the callbacks for the given event. <ide> # <ide> # Calls the before and around callbacks in the order they were set, yields <ide> def __callback_runner_name(kind) <ide> # This is used internally to append, prepend and skip callbacks to the <ide> # CallbackChain. <ide> def __update_callbacks(name, filters = [], block = nil) #:nodoc: <del> type = [:before, :after, :around].include?(filters.first) ? filters.shift : :before <add> type = CALLBACK_FILTER_TYPES.include?(filters.first) ? filters.shift : :before <ide> options = filters.last.is_a?(Hash) ? filters.pop : {} <ide> filters.unshift(block) if block <ide>
1
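The one-line refactor above hoists the literal [:before, :after, :around] into CALLBACK_FILTER_TYPES so the filter-type check no longer rebuilds the array on every call. A small sketch of how such a constant is consumed, modeled loosely on the __update_callbacks signature rather than copied from Rails:

CALLBACK_FILTER_TYPES = [:before, :after, :around]

def split_filters(*filters)
  # If the first argument names a filter type, pull it off; otherwise default to :before.
  type = CALLBACK_FILTER_TYPES.include?(filters.first) ? filters.shift : :before
  [type, filters]
end

p split_filters(:after, :log)   # => [:after, [:log]]
p split_filters(:log, :notify)  # => [:before, [:log, :notify]]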
Javascript
Javascript
test identifier caching
80d713ec7e00a0b63de68ce71cf218f50d1907cf
<ide><path>test/RecordIdsPlugin.test.js <add>var should = require("should"); <add> <add>var path = require("path"); <add>var webpack = require("../lib/webpack"); <add> <add>var RecordIdsPlugin = require("../lib/RecordIdsPlugin"); <add> <add>function makeRelative(compiler, identifier) { <add> var context = compiler.context; <add> return identifier.split("|").map(function(str) { <add> return str.split("!").map(function(str) { <add> return path.relative(context, str); <add> }).join("!"); <add> }).join("|"); <add>} <add> <add>describe("RecordIdsPlugin", function() { <add> <add> var compiler; <add> <add> before(function() { <add> compiler = webpack({ <add> entry: "./nodetest/entry", <add> context: path.join(__dirname, "fixtures"), <add> output: { <add> path: path.join(__dirname, "nodetest", "js"), <add> filename: "result1.js" <add> } <add> }); <add> <add> compiler.plugin("compilation", function(compilation, callback) { <add> compilation.plugin("should-record", function() { <add> return true; <add> }); <add> }); <add> }); <add> <add> it("should cache identifiers", function(done) { <add> compiler.compile(function(err, compilation) { <add> if(err) done(err); <add> var pass = true; <add> for(var i = 0; i < compilation.modules.length; i++) { <add> try { <add> should.exist(compilation.modules[i].portableId); <add> compilation.modules[i].portableId.should.equal(makeRelative(compiler, compilation.modules[i].identifier())); <add> } catch(e) { <add> done(e); <add> pass = false; <add> break; <add> } <add> } <add> if(pass) done(); <add> }); <add> }); <add>});
1
Python
Python
remove schema=none until optional
65448b2e34ab55291a52caaa950e9c427f85902c
<ide><path>spacy/util.py <ide> def resolve_dot_names(config: Config, dot_names: List[Optional[str]]) -> List[Op <ide> section = name.split(".")[0] <ide> # We want to avoid resolving the same thing twice. <ide> if section not in resolved: <del> resolved[section] = registry.resolve(config[section], schema=None) <add> resolved[section] = registry.resolve(config[section]) <ide> output.append(dot_to_object(resolved, name)) <ide> return output <ide>
1
Javascript
Javascript
use blue on non-windows systems for number/bigint"
1329844a0808705091891175a6bee58358380af6
<ide><path>lib/util.js <ide> inspect.colors = Object.assign(Object.create(null), { <ide> }); <ide> <ide> // Don't use 'blue' not visible on cmd.exe <del>const windows = process.platform === 'win32'; <ide> inspect.styles = Object.assign(Object.create(null), { <ide> 'special': 'cyan', <del> 'number': windows ? 'yellow' : 'blue', <del> 'bigint': windows ? 'yellow' : 'blue', <add> 'number': 'yellow', <add> 'bigint': 'yellow', <ide> 'boolean': 'yellow', <ide> 'undefined': 'grey', <ide> 'null': 'bold', <ide><path>test/parallel/test-stream-buffer-list.js <ide> assert.deepStrictEqual(list, new BufferList()); <ide> <ide> const tmp = util.inspect.defaultOptions.colors; <ide> util.inspect.defaultOptions = { colors: true }; <del>const color = util.inspect.colors[util.inspect.styles.number]; <ide> assert.strictEqual( <ide> util.inspect(list), <del> `BufferList { length: \u001b[${color[0]}m0\u001b[${color[1]}m }`); <add> 'BufferList { length: \u001b[33m0\u001b[39m }'); <ide> util.inspect.defaultOptions = { colors: tmp };
2
Python
Python
default it to none
36d2332ae8bb96664a268d043a40a967d8d01412
<ide><path>libcloud/storage/drivers/s3.py <ide> def _put_object(self, container, object_name, method='PUT', <ide> response = response <ide> server_hash = headers.get('etag', '').replace('"', '') <ide> server_side_encryption = headers.get('x-amz-server-side-encryption', <del> True) <add> None) <ide> <ide> # NOTE: If AWS KMS server side encryption is enabled, ETag won't <ide> # contain object MD5 digest so we skip the checksum check
1
Ruby
Ruby
simplify version checks
8c6defd9cf58840bd08af30147a9f6305a219712
<ide><path>Library/Homebrew/extend/os/mac/diagnostic.rb <ide> def check_for_unsupported_macos <ide> end <ide> <ide> def check_xcode_up_to_date <del> return unless MacOS::Xcode.installed? <ide> return unless MacOS::Xcode.outdated? <ide> <ide> # Travis CI images are going to end up outdated so don't complain when <ide> def check_xcode_up_to_date <ide> end <ide> <ide> def check_clt_up_to_date <del> return unless MacOS::CLT.installed? <ide> return unless MacOS::CLT.outdated? <ide> <ide> # Travis CI images are going to end up outdated so don't complain when <ide> def check_xcode_8_without_clt_on_el_capitan <ide> end <ide> <ide> def check_xcode_minimum_version <del> return unless MacOS::Xcode.installed? <ide> return unless MacOS::Xcode.below_minimum_version? <ide> <ide> <<~EOS <ide> def check_xcode_minimum_version <ide> end <ide> <ide> def check_clt_minimum_version <del> return unless MacOS::CLT.installed? <ide> return unless MacOS::CLT.below_minimum_version? <ide> <ide> <<~EOS <ide> def check_xcode_license_approved <ide> EOS <ide> end <ide> <del> def check_for_latest_xquartz <del> return unless MacOS::XQuartz.version <del> return if MacOS::XQuartz.provided_by_apple? <del> <del> installed_version = Version.create(MacOS::XQuartz.version) <del> latest_version = Version.create(MacOS::XQuartz.latest_version) <del> return if installed_version >= latest_version <add> def check_xquartz_up_to_date <add> return unless MacOS::XQuartz.outdated? <ide> <ide> <<~EOS <ide> Your XQuartz (#{installed_version}) is outdated. <ide> def check_for_latest_xquartz <ide> end <ide> <ide> def check_for_beta_xquartz <del> return unless MacOS::XQuartz.version <del> return unless MacOS::XQuartz.version.include? "beta" <add> return unless MacOS::XQuartz.version.to_s.include?("beta") <ide> <ide> <<~EOS <ide> The following beta release of XQuartz is installed: #{MacOS::XQuartz.version}
1
Javascript
Javascript
remove unused catch bindings
5407690bd79a4aa9b5ea72acb98d1a8efd309029
<ide><path>benchmark/_http-benchmarkers.js <ide> class AutocannonBenchmarker { <ide> let result; <ide> try { <ide> result = JSON.parse(output); <del> } catch (err) { <add> } catch { <ide> return undefined; <ide> } <ide> if (!result || !result.requests || !result.requests.average) { <ide> class TestDoubleBenchmarker { <ide> let result; <ide> try { <ide> result = JSON.parse(output); <del> } catch (err) { <add> } catch { <ide> return undefined; <ide> } <ide> return result.throughput; <ide><path>benchmark/child_process/child-process-exec-stdout.js <ide> function childProcessExecStdout({ dur, len }) { <ide> // Sometimes there's a yes.exe process left hanging around on Windows. <ide> try { <ide> execSync(`taskkill /f /t /pid ${child.pid}`); <del> } catch (_) { <add> } catch { <ide> // this is a best effort kill. stderr is piped to parent for tracing. <ide> } <ide> } else { <ide><path>benchmark/fs/read-stream-throughput.js <ide> function runTest() { <ide> }); <ide> <ide> rs.on('end', function() { <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> // MB/sec <ide> bench.end(bytes / (1024 * 1024)); <ide> }); <ide> function makeFile() { <ide> buf.fill('x'); <ide> } <ide> <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> var w = 1024; <ide> const ws = fs.createWriteStream(filename); <ide> ws.on('close', runTest); <ide><path>benchmark/fs/readfile-partitioned.js <ide> const bench = common.createBenchmark(main, { <ide> <ide> function main(conf) { <ide> const len = +conf.len; <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> var data = Buffer.alloc(len, 'x'); <ide> fs.writeFileSync(filename, data); <ide> data = null; <ide> function main(conf) { <ide> const totalOps = reads + zips; <ide> benchEnded = true; <ide> bench.end(totalOps); <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> }, +conf.dur * 1000); <ide> <ide> function read() { <ide><path>benchmark/fs/readfile.js <ide> const bench = common.createBenchmark(main, { <ide> }); <ide> <ide> function main({ len, dur, concurrent }) { <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> var data = Buffer.alloc(len, 'x'); <ide> fs.writeFileSync(filename, data); <ide> data = null; <ide> function main({ len, dur, concurrent }) { <ide> setTimeout(function() { <ide> benchEnded = true; <ide> bench.end(reads); <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> process.exit(0); <ide> }, dur * 1000); <ide> <ide><path>benchmark/fs/write-stream-throughput.js <ide> function main({ dur, encodingType, size }) { <ide> throw new Error(`invalid encodingType: ${encodingType}`); <ide> } <ide> <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> <ide> var started = false; <ide> var ended = false; <ide> function main({ dur, encodingType, size }) { <ide> f.on('finish', function() { <ide> ended = true; <ide> const written = fs.statSync(filename).size / 1024; <del> try { fs.unlinkSync(filename); } catch (e) {} <add> try { fs.unlinkSync(filename); } catch {} <ide> bench.end(written / 1024); <ide> }); <ide> <ide><path>benchmark/misc/punycode.js <ide> const common = require('../common.js'); <ide> let icu; <ide> try { <ide> icu = common.binding('icu'); <del>} catch (err) {} <add>} catch {} <ide> const punycode 
= require('punycode'); <ide> <ide> const bench = common.createBenchmark(main, { <ide><path>benchmark/module/module-loader.js <ide> const bench = common.createBenchmark(main, { <ide> <ide> function main({ n, fullPath, useCache }) { <ide> tmpdir.refresh(); <del> try { fs.mkdirSync(benchmarkDirectory); } catch (e) {} <add> try { fs.mkdirSync(benchmarkDirectory); } catch {} <ide> for (var i = 0; i <= n; i++) { <ide> fs.mkdirSync(`${benchmarkDirectory}${i}`); <ide> fs.writeFileSync( <ide><path>benchmark/napi/function_args/index.js <ide> let napi; <ide> <ide> try { <ide> v8 = require('./build/Release/binding'); <del>} catch (err) { <add>} catch { <ide> console.error(`${__filename}: V8 Binding failed to load`); <ide> process.exit(0); <ide> } <ide> <ide> try { <ide> napi = require('./build/Release/napi_binding'); <del>} catch (err) { <add>} catch { <ide> console.error(`${__filename}: NAPI-Binding failed to load`); <ide> process.exit(0); <ide> } <ide><path>benchmark/napi/function_call/index.js <ide> const common = require('../../common.js'); <ide> <ide> try { <ide> var binding = require('./build/Release/binding'); <del>} catch (er) { <add>} catch { <ide> console.error('misc/function_call.js Binding failed to load'); <ide> process.exit(0); <ide> } <ide> const cxx = binding.hello; <ide> let napi_binding; <ide> try { <ide> napi_binding = require('./build/Release/napi_binding'); <del>} catch (er) { <add>} catch { <ide> console.error('misc/function_call/index.js NAPI-Binding failed to load'); <ide> process.exit(0); <ide> }
10
Java
Java
reuse existing methods
faea9b54ecd5f9790382eb94e8a97aa462fb7663
<ide><path>rxjava-core/src/main/java/rx/Observable.java <ide> public final static <T> Observable<T> from(T[] items, Scheduler scheduler) { <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/hh229027.aspx">MSDN: Observable.Interval</a> <ide> */ <ide> public final static Observable<Long> interval(long interval, TimeUnit unit) { <del> return create(new OnSubscribeTimerPeriodically(interval, interval, unit, Schedulers.computation())); <add> return interval(interval, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final <TClosing> Observable<List<T>> buffer(Func0<? extends Observable<? <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.buffer.aspx">MSDN: Observable.Buffer</a> <ide> */ <ide> public final Observable<List<T>> buffer(int count) { <del> return lift(new OperatorBufferWithSize<T>(count, count)); <add> return buffer(count, count); <ide> } <ide> <ide> /** <ide> public final Observable<List<T>> buffer(int count, int skip) { <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.buffer.aspx">MSDN: Observable.Buffer</a> <ide> */ <ide> public final Observable<List<T>> buffer(long timespan, long timeshift, TimeUnit unit) { <del> return lift(new OperatorBufferWithTime<T>(timespan, timeshift, unit, Integer.MAX_VALUE, Schedulers.computation())); <add> return buffer(timespan, timeshift, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<List<T>> buffer(long timespan, long timeshift, TimeUnit <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.buffer.aspx">MSDN: Observable.Buffer</a> <ide> */ <ide> public final Observable<List<T>> buffer(long timespan, TimeUnit unit) { <del> return lift(new OperatorBufferWithTime<T>(timespan, timespan, unit, Integer.MAX_VALUE, Schedulers.computation())); <add> return buffer(timespan, unit, Integer.MAX_VALUE, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<List<T>> buffer(long timespan, TimeUnit unit, int count, <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.buffer.aspx">MSDN: Observable.Buffer</a> <ide> */ <ide> public final Observable<List<T>> buffer(long timespan, TimeUnit unit, Scheduler scheduler) { <del> return lift(new OperatorBufferWithTime<T>(timespan, timespan, unit, Integer.MAX_VALUE, scheduler)); <add> return buffer(timespan, timespan, unit, scheduler); <ide> } <ide> <ide> /** <ide> public final <TOpening, TClosing> Observable<List<T>> buffer(Observable<? extend <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.buffer.aspx">MSDN: Observable.Buffer</a> <ide> */ <ide> public final <B> Observable<List<T>> buffer(Observable<B> boundary) { <del> return lift(new OperatorBufferWithSingleObservable<T, B>(boundary, 16)); <add> return buffer(boundary, 16); <ide> } <ide> <ide> /** <ide> public final <U> Observable<T> debounce(Func1<? super T, ? extends Observable<U> <ide> * @see #throttleWithTimeout(long, TimeUnit) <ide> */ <ide> public final Observable<T> debounce(long timeout, TimeUnit unit) { <del> return lift(new OperatorDebounceWithTime<T>(timeout, unit, Schedulers.computation())); <add> return debounce(timeout, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final <U> Observable<T> delay(Func1<? super T, ? 
extends Observable<U>> i <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/hh229810.aspx">MSDN: Observable.Delay</a> <ide> */ <ide> public final Observable<T> delay(long delay, TimeUnit unit) { <del> return create(new OnSubscribeDelay<T>(this, delay, unit, Schedulers.computation())); <add> return delay(delay, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<T> onExceptionResumeNext(final Observable<? extends T> r <ide> * @see <a href="http://www.grahamlea.com/2014/07/rxjava-threading-examples/">RxJava Threading Examples</a> <ide> */ <ide> public final <R> Observable<R> parallel(Func1<Observable<T>, Observable<R>> f) { <del> return lift(new OperatorParallel<T, R>(f, Schedulers.computation())); <add> return parallel(f, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<T> retryWhen(Func1<? super Observable<? extends Notifica <ide> * @see #throttleLast(long, TimeUnit) <ide> */ <ide> public final Observable<T> sample(long period, TimeUnit unit) { <del> return lift(new OperatorSampleWithTime<T>(period, unit, Schedulers.computation())); <add> return sample(period, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<T> takeWhileWithIndex(final Func2<? super T, ? super Int <ide> * @see <a href="https://github.com/Netflix/RxJava/wiki/Backpressure">RxJava wiki: Backpressure</a> <ide> */ <ide> public final Observable<T> throttleFirst(long windowDuration, TimeUnit unit) { <del> return lift(new OperatorThrottleFirst<T>(windowDuration, unit, Schedulers.computation())); <add> return throttleFirst(windowDuration, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<T> throttleWithTimeout(long timeout, TimeUnit unit, Sche <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/hh212107.aspx">MSDN: Observable.TimeInterval</a> <ide> */ <ide> public final Observable<TimeInterval<T>> timeInterval() { <del> return lift(new OperatorTimeInterval<T>(Schedulers.immediate())); <add> return timeInterval(Schedulers.immediate()); <ide> } <ide> <ide> /** <ide> public final <TClosing> Observable<Observable<T>> window(Func0<? 
extends Observa <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.window.aspx">MSDN: Observable.Window</a> <ide> */ <ide> public final Observable<Observable<T>> window(int count) { <del> return lift(new OperatorWindowWithSize<T>(count, count)); <add> return window(count, count); <ide> } <ide> <ide> /** <ide> public final Observable<Observable<T>> window(long timespan, long timeshift, Tim <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.window.aspx">MSDN: Observable.Window</a> <ide> */ <ide> public final Observable<Observable<T>> window(long timespan, TimeUnit unit) { <del> return lift(new OperatorWindowWithTime<T>(timespan, timespan, unit, Integer.MAX_VALUE, Schedulers.computation())); <add> return window(timespan, timespan, unit, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<Observable<T>> window(long timespan, TimeUnit unit) { <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.window.aspx">MSDN: Observable.Window</a> <ide> */ <ide> public final Observable<Observable<T>> window(long timespan, TimeUnit unit, int count) { <del> return lift(new OperatorWindowWithTime<T>(timespan, timespan, unit, count, Schedulers.computation())); <add> return window(timespan, unit, count, Schedulers.computation()); <ide> } <ide> <ide> /** <ide> public final Observable<Observable<T>> window(long timespan, TimeUnit unit, int <ide> * @see <a href="http://msdn.microsoft.com/en-us/library/system.reactive.linq.observable.window.aspx">MSDN: Observable.Window</a> <ide> */ <ide> public final Observable<Observable<T>> window(long timespan, TimeUnit unit, Scheduler scheduler) { <del> return lift(new OperatorWindowWithTime<T>(timespan, timespan, unit, Integer.MAX_VALUE, scheduler)); <add> return window(timespan, unit, Integer.MAX_VALUE, scheduler); <ide> } <ide> <ide> /**
1
Javascript
Javascript
remove unnecessary variables
ec39e62ff26b3efcdcf1bbfd423c452008ea9028
<ide><path>lib/net.js <ide> Server.prototype.listen = function(...args) { <ide> throw new ERR_SERVER_ALREADY_LISTEN(); <ide> } <ide> <del> var hasCallback = (cb !== null); <del> if (hasCallback) { <add> if (cb !== null) { <ide> this.once('listening', cb); <ide> } <ide> var backlogFromArgs = <ide> if (process.platform === 'win32') { <ide> } <ide> }; <ide> } else { <del> _setSimultaneousAccepts = function(handle) {}; <add> _setSimultaneousAccepts = function() {}; <ide> } <ide> <ide> module.exports = {
1
Ruby
Ruby
use formula path when installing bottle
687c87d74d76b4b7f6264f7508f92f3521327ebb
<ide><path>Library/Homebrew/formulary.rb <ide> def initialize(bottle_name) <ide> def get_formula(spec, force_bottle: false, flags: [], **) <ide> contents = Utils::Bottles.formula_contents @bottle_filename, name: name <ide> formula = begin <del> Formulary.from_contents(name, @bottle_filename, contents, spec, force_bottle: force_bottle, flags: flags) <add> Formulary.from_contents(name, path, contents, spec, force_bottle: force_bottle, flags: flags) <ide> rescue FormulaUnreadableError => e <ide> opoo <<~EOS <ide> Unreadable formula in #{@bottle_filename}:
1