From 7af74b040213486d9c76ef41931011a2a4859119 Mon Sep 17 00:00:00 2001 From: Matteo Collina Date: Tue, 14 Mar 2017 14:29:34 +0100 Subject: [PATCH] node v7.7.2 build (#262) * replace bind() * re-add WriteReq and CorkedRequest constructors * Updated to v7.7.2, refactored the build to not crawl The new build system uses the tarball from nodejs.org. * try updating zuul * just remove android --- .travis.yml | 2 - README.md | 2 +- build/build.js | 117 ++++++++----- build/files.js | 47 ++++++ build/package.json | 11 +- lib/_stream_readable.js | 2 +- lib/_stream_writable.js | 51 +++--- test/common.js | 125 +++++++++++++- test/parallel/test-stream-big-push.js | 4 +- test/parallel/test-stream-duplex.js | 1 + test/parallel/test-stream-end-paused.js | 2 +- test/parallel/test-stream-pipe-after-end.js | 27 +-- ...t-stream-pipe-await-drain-manual-resume.js | 4 + ...tream-pipe-await-drain-push-while-write.js | 14 +- test/parallel/test-stream-pipe-await-drain.js | 12 ++ .../test-stream-pipe-error-handling.js | 90 +++++----- .../test-stream-pipe-unpipe-streams.js | 2 +- test/parallel/test-stream-push-strings.js | 4 +- ...stream-readable-constructor-set-methods.js | 2 +- test/parallel/test-stream-readable-event.js | 86 +++++----- .../test-stream-readable-flow-recursion.js | 8 +- .../test-stream-readable-needReadable.js | 2 +- .../test-stream-readable-resumeScheduled.js | 104 ++++++------ ...tream-transform-constructor-set-methods.js | 14 +- ...tream-transform-objectmode-falsey-value.js | 17 +- .../test-stream-transform-split-objectmode.js | 4 +- .../test-stream-unshift-empty-chunk.js | 2 +- .../parallel/test-stream-unshift-read-race.js | 41 ++--- ...stream-writable-change-default-encoding.js | 6 +- ...stream-writable-constructor-set-methods.js | 6 +- ...ableState-uncorked-bufferedRequestCount.js | 59 +++++++ test/parallel/test-stream-writev.js | 12 +- ...est-stream2-base64-single-char-read-end.js | 5 +- test/parallel/test-stream2-compatibility.js | 2 +- .../parallel/test-stream2-large-read-stall.js | 15 +- test/parallel/test-stream2-objects.js | 2 +- .../test-stream2-pipe-error-handling.js | 154 +++++++++--------- test/parallel/test-stream2-push.js | 7 +- ...st-stream2-readable-empty-buffer-no-eof.js | 4 +- .../test-stream2-readable-from-list.js | 4 +- .../test-stream2-readable-non-empty-end.js | 8 +- test/parallel/test-stream2-set-encoding.js | 20 +-- test/parallel/test-stream2-transform.js | 4 +- test/parallel/test-stream2-unpipe-drain.js | 16 +- test/parallel/test-stream2-unpipe-leak.js | 16 +- test/parallel/test-stream2-writable.js | 21 +-- test/parallel/test-stream3-cork-end.js | 10 +- test/parallel/test-stream3-cork-uncork.js | 10 +- test/parallel/test-stream3-pause-then-read.js | 8 +- 49 files changed, 716 insertions(+), 470 deletions(-) create mode 100644 test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js diff --git a/.travis.yml b/.travis.yml index 84504c98f5..a6285ff0e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,8 +28,6 @@ matrix: env: TASK=test - node_js: 6 env: TASK=test - - node_js: 5 - env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" - node_js: 5 env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" - node_js: 5 diff --git a/README.md b/README.md index 6cc54caa84..7e729fe472 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ npm install --save readable-stream This package is a mirror of the Streams2 and Streams3 implementations in Node-core. 
-Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v7.3.0/docs/api/). +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v7.7.2/docs/api/). If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). diff --git a/build/build.js b/build/build.js index a54eae8181..2563274b11 100644 --- a/build/build.js +++ b/build/build.js @@ -1,13 +1,17 @@ #!/usr/bin/env node -const hyperquest = require('hyperzip')(require('hyperdirect')) +const hyperquest = require('hyperquest') , bl = require('bl') , fs = require('fs') , path = require('path') - , cheerio = require('cheerio') + , tar = require('tar-fs') + , gunzip = require('gunzip-maybe') , babel = require('babel-core') + , glob = require('glob') + , pump = require('pump') + , rimraf = require('rimraf') , encoding = 'utf8' - , urlRegex = /^https?:\/\// + , urlRegex = /^https?:\/\// , nodeVersion = process.argv[2] , nodeVersionRegexString = '\\d+\\.\\d+\\.\\d+' , usageVersionRegex = RegExp('^' + nodeVersionRegexString + '$') @@ -18,10 +22,10 @@ const hyperquest = require('hyperzip')(require('hyperdirect')) , files = require('./files') , testReplace = require('./test-replacements') - , srcurlpfx = `https://raw.githubusercontent.com/nodejs/node/v${nodeVersion}/` - , libsrcurl = srcurlpfx + 'lib/' - , testsrcurl = srcurlpfx + 'test/parallel/' - , testlisturl = `https://github.com/nodejs/node/tree/v${nodeVersion}/test/parallel` + , downloadurl = `https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}.tar.gz` + , src = path.join(__dirname, `node-v${nodeVersion}`) + , libsrcurl = path.join(src, 'lib/') + , testsrcurl = path.join(src, 'test/parallel/') , libourroot = path.join(__dirname, '../lib/') , testourroot = path.join(__dirname, '../test/parallel/') @@ -33,16 +37,19 @@ if (!usageVersionRegex.test(nodeVersion)) { // `inputLoc`: URL or local path. function processFile (inputLoc, out, replacements) { - var file = urlRegex.test(inputLoc) ? 
- hyperquest(inputLoc) : - fs.createReadStream(inputLoc, encoding) + var file = fs.createReadStream(inputLoc, encoding) file.pipe(bl(function (err, data) { if (err) throw err + console.log('Processing', inputLoc) data = data.toString() replacements.forEach(function (replacement) { - data = data.replace.apply(data, replacement) + const regexp = replacement[0] + var arg2 = replacement[1] + if (typeof arg2 === 'function') + arg2 = arg2.bind(data) + data = data.replace(regexp, arg2) }) if (inputLoc.slice(-3) === '.js') { const transformed = babel.transform(data, { @@ -69,7 +76,7 @@ function deleteOldTests(){ const files = fs.readdirSync(path.join(__dirname, '..', 'test', 'parallel')); for (let file of files) { let name = path.join(__dirname, '..', 'test', 'parallel', file); - console.log('removing', name); + console.log('Removing', name); fs.unlinkSync(name); } } @@ -94,43 +101,73 @@ function processTestFile (file) { } //-------------------------------------------------------------------- -// Grab & process files in ../lib/ +// Download the release from nodejs.org +console.log(`Downloading ${downloadurl}`) +pump( + hyperquest(downloadurl), + gunzip(), + tar.extract(__dirname), + function (err) { + if (err) { + throw err + } -Object.keys(files).forEach(processLibFile) -// delete the current contents of test/parallel so if node removes any tests -// they are removed here -deleteOldTests(); + //-------------------------------------------------------------------- + // Grab & process files in ../lib/ -//-------------------------------------------------------------------- -// Discover, grab and process all test-stream* files on nodejs/node + Object.keys(files).forEach(processLibFile) -hyperquest(testlisturl).pipe(bl(function (err, data) { - if (err) - throw err - var $ = cheerio.load(data.toString()) + //-------------------------------------------------------------------- + // Discover, grab and process all test-stream* files on the given release - $('table.files .js-navigation-open').each(function () { - var file = $(this).text() - if (/^test-stream/.test(file) && !/-wrap(?:-encoding)?\.js$/.test(file) && file !== 'test-stream2-httpclient-response-end.js' && file !== 'test-stream-base-no-abort.js' && file !== 'test-stream-preprocess.js' && file !== 'test-stream-inheritance.js') - processTestFile(file) - }) -})) + glob(path.join(testsrcurl, 'test-stream*.js'), function (err, list) { + if (err) { + throw err + } + list.forEach(function (file) { + file = path.basename(file) + if (!/-wrap(?:-encoding)?\.js$/.test(file) && + file !== 'test-stream2-httpclient-response-end.js' && + file !== 'test-stream-base-no-abort.js' && + file !== 'test-stream-preprocess.js' && + file !== 'test-stream-inheritance.js') { + processTestFile(file) + } + }) + }) -//-------------------------------------------------------------------- -// Grab the nodejs/node test/common.js -processFile( - testsrcurl.replace(/parallel\/$/, 'common.js') - , path.join(testourroot, '../common.js') - , testReplace['common.js'] + //-------------------------------------------------------------------- + // Grab the nodejs/node test/common.js + + processFile( + testsrcurl.replace(/parallel\/$/, 'common.js') + , path.join(testourroot, '../common.js') + , testReplace['common.js'] + ) + + //-------------------------------------------------------------------- + // Update Node version in README + + processFile(readmePath, readmePath, [ + [readmeVersionRegex, "$1" + nodeVersion] + ]) + } ) 
-//-------------------------------------------------------------------- -// Update Node version in README +// delete the current contents of test/parallel so if node removes any tests +// they are removed here +deleteOldTests(); -processFile(readmePath, readmePath, [ - [readmeVersionRegex, "$1" + nodeVersion] -]) +process.once('beforeExit', function () { + rimraf(src, function (err) { + if (err) { + throw err + } + + console.log('Removed', src) + }) +}) diff --git a/build/files.js b/build/files.js index ede126a18a..57881ce96f 100644 --- a/build/files.js +++ b/build/files.js @@ -183,6 +183,47 @@ const headRegexp = /(^module.exports = \w+;?)/m /if \(typeof Symbol === 'function' && Symbol\.hasInstance\) \{/, `if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {` ] + , removeOnWriteBind = [ + /onwrite\.bind\([^)]+?\)/, + `function(er) { onwrite(stream, er); }` + ] + , removeCorkedFinishBind = [ + /onCorkedFinish\.bind\([^)]+?\)/, + function (match) { + const code = this + var src = /^function onCorkedFinish[^{]*?\{([\s\S]+?\r?\n)\}/m.exec(code) + src = src[1].trim().replace(/corkReq/g, 'this').replace(/(\r?\n)/mg, ' $1') + return `(err) => {\n${src}\n}` + } + ] + , removeOnCorkedFinish = [ + /^function onCorkedFinish[\s\S]+?\r?\n\}/m, + '' + ] + , addConstructors = [ + headRegexp, + `$1\n\nfunction WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; + } + // It seems a linked list but it is not + // there will be only 2 of these for each stream + function CorkedRequest(state) { + this.next = null; + this.entry = null; + this.finish = onCorkedFinish.bind(undefined, this, state); + }\n\n` + ] + , useWriteReq = [ + /state\.lastBufferedRequest = \{.+?\}/g, + `state.lastBufferedRequest = new WriteReq(chunk, encoding, cb)` + ] + , useCorkedRequest = [ + /var corkReq = [\s\S]+?(.+?)\.corkedRequestsFree = corkReq/g, + `$1.corkedRequestsFree = new CorkedRequest($1)` + ] module.exports['_stream_duplex.js'] = [ requireReplacement @@ -261,6 +302,12 @@ module.exports['_stream_writable.js'] = [ , bufferShimFix , bufferStaticMethods , fixInstanceCheck + , removeOnWriteBind + , removeCorkedFinishBind + , removeOnCorkedFinish + , addConstructors + , useWriteReq + , useCorkedRequest ] module.exports['internal/streams/BufferList.js'] = [ bufferShimFix diff --git a/build/package.json b/build/package.json index 9943b714ea..701fbb18c0 100644 --- a/build/package.json +++ b/build/package.json @@ -12,9 +12,12 @@ "babel-plugin-transform-es2015-parameters": "^6.11.4", "babel-plugin-transform-es2015-shorthand-properties": "^6.8.0", "babel-plugin-transform-es2015-template-literals": "^6.8.0", - "bl": "~0.6.0", - "cheerio": "~0.13.1", - "hyperdirect": "0.0.0", - "hyperzip": "0.0.0" + "bl": "^1.2.0", + "glob": "^7.1.1", + "gunzip-maybe": "^1.4.0", + "hyperquest": "^2.1.2", + "pump": "^1.0.2", + "rimraf": "^2.6.1", + "tar-fs": "^1.15.1" } } diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 3a7d42d62b..13b5a7474d 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -92,7 +92,7 @@ function ReadableState(options, stream) { this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; // cast to ints. 
- this.highWaterMark = ~ ~this.highWaterMark; + this.highWaterMark = ~~this.highWaterMark; // A linked list is used to store data chunks instead of an array because the // linked list can remove elements from the beginning faster than diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js index 4d9c62ba62..f13ef893e4 100644 --- a/lib/_stream_writable.js +++ b/lib/_stream_writable.js @@ -6,6 +6,20 @@ module.exports = Writable; +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + this.next = null; + this.entry = null; + this.finish = onCorkedFinish.bind(undefined, this, state); +} + /**/ var processNextTick = require('process-nextick-args'); /**/ @@ -77,7 +91,7 @@ function WritableState(options, stream) { this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; + this.highWaterMark = ~~this.highWaterMark; // drain event flag. this.needDrain = false; @@ -232,20 +246,16 @@ function writeAfterEnd(stream, cb) { processNextTick(cb, er); } -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. function validChunk(stream, state, chunk, cb) { var valid = true; var er = false; - // Always throw error if a null is written - // if we are not in object mode then throw - // if it is not a buffer, string, or undefined. + if (chunk === null) { er = new TypeError('May not write null values to stream'); - } else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { er = new TypeError('Invalid non-string/buffer chunk'); } if (er) { @@ -259,19 +269,20 @@ function validChunk(stream, state, chunk, cb) { Writable.prototype.write = function (chunk, encoding, cb) { var state = this._writableState; var ret = false; + var isBuf = Buffer.isBuffer(chunk); if (typeof encoding === 'function') { cb = encoding; encoding = null; } - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; if (typeof cb !== 'function') cb = nop; - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); } return ret; @@ -311,10 +322,11 @@ function decodeChunk(state, chunk, encoding) { // if we're already writing something, then just put this // in the queue, and wait our turn. Otherwise, call _write // If we return false, then we need a drain event, so set that flag. 
-function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + chunk = decodeChunk(state, chunk, encoding); + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + } var len = state.objectMode ? 1 : chunk.length; state.length += len; @@ -383,8 +395,8 @@ function onwrite(stream, er) { asyncWrite(afterWrite, stream, state, finished, cb); /**/ } else { - afterWrite(stream, state, finished, cb); - } + afterWrite(stream, state, finished, cb); + } } } @@ -535,7 +547,6 @@ function CorkedRequest(state) { this.next = null; this.entry = null; - this.finish = function (err) { var entry = _this.entry; _this.entry = null; diff --git a/test/common.js b/test/common.js index 4cd47342f3..a8ea704f40 100644 --- a/test/common.js +++ b/test/common.js @@ -40,8 +40,9 @@ util.inherits = require('inherits'); /**/ var Timer = { now: function () {} }; +var execSync = require('child_process').execSync; -var testRoot = process.env.NODE_TEST_DIR ? path.resolve(process.env.NODE_TEST_DIR) : __dirname; +var testRoot = process.env.NODE_TEST_DIR ? fs.realpathSync(process.env.NODE_TEST_DIR) : __dirname; exports.fixturesDir = path.join(__dirname, 'fixtures'); exports.tmpDirName = 'tmp'; @@ -66,8 +67,9 @@ exports.rootDir = exports.isWindows ? 'c:\\' : '/'; //exports.buildType = process.config.target_defaults.default_configuration; function rimrafSync(p) { + var st = void 0; try { - var st = fs.lstatSync(p); + st = fs.lstatSync(p); } catch (e) { if (e.code === 'ENOENT') return; } @@ -183,8 +185,8 @@ if (exports.isLinux) { if (exports.isWindows) opensslCli += '.exe'; - var openssl_cmd = child_process.spawnSync(opensslCli, ['version']); - if (openssl_cmd.status !== 0 || openssl_cmd.error !== undefined) { + var opensslCmd = child_process.spawnSync(opensslCli, ['version']); + if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) { // openssl command cannot be executed opensslCli = false; } @@ -225,6 +227,27 @@ exports.hasIPv6 = objectKeys(ifaces).some(function (name) { ); }); +/* + * Check that when running a test with + * `$node --abort-on-uncaught-exception $file child` + * the process aborts. 
+ */ +exports.childShouldThrowAndAbort = function () { + var testCmd = ''; + if (!exports.isWindows) { + // Do not create core files, as it can take a lot of disk space on + // continuous testing and developers' machines + testCmd += 'ulimit -c 0 && '; + } + testCmd += process.argv[0] + ' --abort-on-uncaught-exception '; + testCmd += process.argv[1] + ' child'; + var child = child_process.exec(testCmd); + child.on('exit', function onExit(exitCode, signal) { + var errMsg = 'Test should have aborted ' + ('but instead exited with exit code ' + exitCode) + (' and signal ' + signal); + assert(exports.nodeProcessAborted(exitCode, signal), errMsg); + }); +}; + exports.ddCommand = function (filename, kilobytes) { if (exports.isWindows) { var p = path.resolve(exports.fixturesDir, 'create-file.js'); @@ -277,6 +300,8 @@ exports.spawnSyncPwd = function (options) { exports.platformTimeout = function (ms) { if (process.config.target_defaults.default_configuration === 'Debug') ms = 2 * ms; + if (global.__coverage__) ms = 4 * ms; + if (exports.isAix) return 2 * ms; // default localhost speed is slower on AIX if (process.arch !== 'arm') return ms; @@ -374,7 +399,13 @@ function leakedGlobals() { for (var val in global) { if (!knownGlobals.includes(global[val])) leaked.push(val); - }return leaked; + }if (global.__coverage__) { + return leaked.filter(function (varname) { + return !/^(cov_|__cov)/.test(varname); + }); + } else { + return leaked; + } } exports.leakedGlobals = leakedGlobals; @@ -385,8 +416,7 @@ process.on('exit', function () { if (!exports.globalCheck) return; var leaked = leakedGlobals(); if (leaked.length > 0) { - console.error('Unknown globals: %s', leaked); - fail('Unknown global found'); + fail('Unexpected global(s) found: ' + leaked.join(', ')); } }); @@ -408,7 +438,7 @@ function runCallChecks(exitCode) { } exports.mustCall = function (fn, expected) { - if (typeof expected !== 'number') expected = 1; + if (expected === undefined) expected = 1;else if (typeof expected !== 'number') throw new TypeError('Invalid expected value: ' + expected); var context = { expected: expected, @@ -445,11 +475,44 @@ exports.fileExists = function (pathname) { } }; +exports.canCreateSymLink = function () { + // On Windows, creating symlinks requires admin privileges. + // We'll only try to run symlink test if we have enough privileges. 
+ // On other platforms, creating symlinks shouldn't need admin privileges + if (exports.isWindows) { + // whoami.exe needs to be the one from System32 + // If unix tools are in the path, they can shadow the one we want, + // so use the full path while executing whoami + var whoamiPath = path.join(process.env['SystemRoot'], 'System32', 'whoami.exe'); + + var err = false; + var output = ''; + + try { + output = execSync(whoamiPath + ' /priv', { timout: 1000 }); + } catch (e) { + err = true; + } finally { + if (err || !output.includes('SeCreateSymbolicLinkPrivilege')) { + return false; + } + } + } + + return true; +}; + function fail(msg) { assert.fail(null, null, msg); } exports.fail = fail; +exports.mustNotCall = function (msg) { + return function mustNotCall() { + fail(msg || 'function should not have been called'); + }; +}; + exports.skip = function (msg) { console.log('1..0 # Skipped: ' + msg); }; @@ -540,6 +603,52 @@ exports.expectWarning = function (name, expected) { }); } /**/ +// https://github.com/w3c/testharness.js/blob/master/testharness.js +exports.WPT = { + test: function (fn, desc) { + try { + fn(); + } catch (err) { + if (err instanceof Error) err.message = 'In ' + desc + ':\n ' + err.message; + throw err; + } + }, + assert_equals: assert.strictEqual, + assert_true: function (value, message) { + return assert.strictEqual(value, true, message); + }, + assert_false: function (value, message) { + return assert.strictEqual(value, false, message); + }, + assert_throws: function (code, func, desc) { + assert.throws(func, function (err) { + return typeof err === 'object' && 'name' in err && err.name === code.name; + }, desc); + }, + assert_array_equals: assert.deepStrictEqual, + assert_unreached: function (desc) { + assert.fail(undefined, undefined, 'Reached unreachable code: ' + desc); + } +}; + +// Useful for testing expected internal/error objects +exports.expectsError = function expectsError(_ref) { + var code = _ref.code, + type = _ref.type, + message = _ref.message; + + return function (error) { + assert.strictEqual(error.code, code); + if (type !== undefined) assert(error instanceof type, error + ' is not the expected type ' + type); + if (message instanceof RegExp) { + assert(message.test(error.message), error.message + ' does not match ' + message); + } else if (typeof message === 'string') { + assert.strictEqual(error.message, message); + } + return true; + }; +}; + function forEach(xs, f) { for (var i = 0, l = xs.length; i < l; i++) { f(xs[i], i); diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js index 4371ce09a7..6bcec0238b 100644 --- a/test/parallel/test-stream-big-push.js +++ b/test/parallel/test-stream-big-push.js @@ -20,8 +20,8 @@ function _read() { }, 1); reads++; } else if (reads === 1) { - var ret = r.push(str); - assert.strictEqual(ret, false); + var _ret = r.push(str); + assert.strictEqual(_ret, false); reads++; } else { r.push(null); diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js index 98fe87eb8d..b44d9611b1 100644 --- a/test/parallel/test-stream-duplex.js +++ b/test/parallel/test-stream-duplex.js @@ -7,6 +7,7 @@ var Duplex = require('../../').Transform; var stream = new Duplex({ objectMode: true }); +assert(Duplex() instanceof Duplex); assert(stream._readableState.objectMode); assert(stream._writableState.objectMode); diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js index 807fd8cb9a..ef79959656 100644 --- 
a/test/parallel/test-stream-end-paused.js +++ b/test/parallel/test-stream-end-paused.js @@ -23,7 +23,7 @@ stream.pause(); setTimeout(common.mustCall(function () { stream.on('end', common.mustCall(function () {})); stream.resume(); -})); +}), 1); process.on('exit', function () { assert(calledRead); diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js index ddd6ca72f1..24ac7e9918 100644 --- a/test/parallel/test-stream-pipe-after-end.js +++ b/test/parallel/test-stream-pipe-after-end.js @@ -1,7 +1,7 @@ /**/ var bufferShim = require('buffer-shims'); /**/ -require('../common'); +var common = require('../common'); var assert = require('assert/'); var Readable = require('../../lib/_stream_readable'); var Writable = require('../../lib/_stream_writable'); @@ -14,7 +14,7 @@ function TestReadable(opt) { this._ended = false; } -TestReadable.prototype._read = function (n) { +TestReadable.prototype._read = function () { if (this._ended) this.emit('error', new Error('_read called twice')); this._ended = true; this.push(null); @@ -34,31 +34,18 @@ TestWritable.prototype._write = function (chunk, encoding, cb) { // this one should not emit 'end' until we read() from it later. var ender = new TestReadable(); -var enderEnded = false; // what happens when you pipe() a Readable that's already ended? var piper = new TestReadable(); // pushes EOF null, and length=0, so this will trigger 'end' piper.read(); -setTimeout(function () { - ender.on('end', function () { - enderEnded = true; - }); - assert(!enderEnded); +setTimeout(common.mustCall(function () { + ender.on('end', common.mustCall(function () {})); var c = ender.read(); - assert.equal(c, null); + assert.strictEqual(c, null); var w = new TestWritable(); - var writableFinished = false; - w.on('finish', function () { - writableFinished = true; - }); + w.on('finish', common.mustCall(function () {})); piper.pipe(w); - - process.on('exit', function () { - assert(enderEnded); - assert(writableFinished); - console.log('ok'); - }); -}); \ No newline at end of file +}), 1); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js index 82d589d392..3dba96d22f 100644 --- a/test/parallel/test-stream-pipe-await-drain-manual-resume.js +++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js @@ -3,6 +3,7 @@ var bufferShim = require('buffer-shims'); /**/ var common = require('../common'); var stream = require('../../'); +var assert = require('assert/'); // A consumer stream with a very low highWaterMark, which starts in a state // where it buffers the chunk it receives rather than indicating that they @@ -25,6 +26,7 @@ var readable = new stream.Readable({ readable.pipe(writable); readable.once('pause', common.mustCall(function () { + assert.strictEqual(readable._readableState.awaitDrain, 1, 'awaitDrain doesn\'t increase'); // First pause, resume manually. The next write() to writable will still // return false, because chunks are still being buffered, so it will increase // the awaitDrain counter again. @@ -33,6 +35,7 @@ readable.once('pause', common.mustCall(function () { })); readable.once('pause', common.mustCall(function () { + assert.strictEqual(readable._readableState.awaitDrain, 1, '.resume() does not reset counter'); // Second pause, handle all chunks from now on. 
Once all callbacks that // are currently queued up are handled, the awaitDrain drain counter should // fall back to 0 and all chunks that are pending on the readable side @@ -71,5 +74,6 @@ readable.push(bufferShim.alloc(100)); // Should get through to the writable. readable.push(null); writable.on('finish', common.mustCall(function () { + assert.strictEqual(readable._readableState.awaitDrain, 0, 'awaitDrain not 0 after all chunks are written'); // Everything okay, all chunks were written. })); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js index c29670b846..def25218b9 100644 --- a/test/parallel/test-stream-pipe-await-drain-push-while-write.js +++ b/test/parallel/test-stream-pipe-await-drain-push-while-write.js @@ -3,6 +3,12 @@ var bufferShim = require('buffer-shims'); /**/ var common = require('../common'); var stream = require('../../'); +var assert = require('assert/'); + +var awaitDrainStates = [1, // after first chunk before callback +1, // after second chunk before callback +0 // resolving chunk pushed after first chunk, awaitDrain is decreased +]; // A writable stream which pushes data onto the stream which pipes into it, // but only the first time it's written to. Since it's not paused at this time, @@ -12,8 +18,14 @@ var writable = new stream.Writable({ write: common.mustCall(function (chunk, encoding, cb) { if (chunk.length === 32 * 1024) { // first chunk - readable.push(new Buffer(33 * 1024)); // above hwm + var beforePush = readable._readableState.awaitDrain; + readable.push(new Buffer(34 * 1024)); // above hwm + // We should check if awaitDrain counter is increased. + var afterPush = readable._readableState.awaitDrain; + assert.strictEqual(afterPush - beforePush, 1, 'Counter is not increased for awaitDrain'); } + + assert.strictEqual(awaitDrainStates.shift(), readable._readableState.awaitDrain, 'State variable awaitDrain is not correct.'); cb(); }, 3) }); diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js index 3e8afb613a..55a749ee8c 100644 --- a/test/parallel/test-stream-pipe-await-drain.js +++ b/test/parallel/test-stream-pipe-await-drain.js @@ -3,12 +3,14 @@ var bufferShim = require('buffer-shims'); /**/ var common = require('../common'); var stream = require('../../'); +var assert = require('assert/'); // This is very similar to test-stream-pipe-cleanup-pause.js. var reader = new stream.Readable(); var writer1 = new stream.Writable(); var writer2 = new stream.Writable(); +var writer3 = new stream.Writable(); // 560000 is chosen here because it is larger than the (default) highWaterMark // and will cause `.write()` to return false @@ -21,7 +23,9 @@ writer1._write = common.mustCall(function (chunk, encoding, cb) { this.emit('chunk-received'); cb(); }, 1); + writer1.once('chunk-received', function () { + assert.strictEqual(reader._readableState.awaitDrain, 0, 'initial value is not 0'); setImmediate(function () { // This one should *not* get through to writer1 because writer2 is not // "done" processing. @@ -31,12 +35,20 @@ writer1.once('chunk-received', function () { // A "slow" consumer: writer2._write = common.mustCall(function (chunk, encoding, cb) { + assert.strictEqual(reader._readableState.awaitDrain, 1, 'awaitDrain isn\'t 1 after first push'); // Not calling cb here to "simulate" slow stream. + // This should be called exactly once, since the first .write() call + // will return false. 
+}, 1); +writer3._write = common.mustCall(function (chunk, encoding, cb) { + assert.strictEqual(reader._readableState.awaitDrain, 2, 'awaitDrain isn\'t 2 after second push'); + // Not calling cb here to "simulate" slow stream. // This should be called exactly once, since the first .write() call // will return false. }, 1); reader.pipe(writer1); reader.pipe(writer2); +reader.pipe(writer3); reader.push(buffer); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js index ce0ac55b2f..06ec6de1b1 100644 --- a/test/parallel/test-stream-pipe-error-handling.js +++ b/test/parallel/test-stream-pipe-error-handling.js @@ -40,56 +40,52 @@ var Stream = require('stream').Stream; } { - (function () { - var R = require('../../').Readable; - var W = require('../../').Writable; - - var r = new R(); - var w = new W(); - var removed = false; - - r._read = common.mustCall(function () { - setTimeout(common.mustCall(function () { - assert(removed); - assert.throws(function () { - w.emit('error', new Error('fail')); - }); - })); - }); - - w.on('error', myOnError); - r.pipe(w); - w.removeListener('error', myOnError); - removed = true; - - function myOnError(er) { - throw new Error('this should not happen'); - } - })(); -} - -{ - (function () { - var R = require('../../').Readable; - var W = require('../../').Writable; + var R = require('../../').Readable; + var W = require('../../').Writable; - var r = new R(); - var w = new W(); - var removed = false; + var r = new R(); + var w = new W(); + var removed = false; - r._read = common.mustCall(function () { - setTimeout(common.mustCall(function () { - assert(removed); + r._read = common.mustCall(function () { + setTimeout(common.mustCall(function () { + assert(removed); + assert.throws(function () { w.emit('error', new Error('fail')); - })); - }); + }, /^Error: fail$/); + }), 1); + }); + + w.on('error', myOnError); + r.pipe(w); + w.removeListener('error', myOnError); + removed = true; + + function myOnError() { + throw new Error('this should not happen'); + } +} + +{ + var _R = require('../../').Readable; + var _W = require('../../').Writable; + + var _r = new _R(); + var _w = new _W(); + var _removed = false; + + _r._read = common.mustCall(function () { + setTimeout(common.mustCall(function () { + assert(_removed); + _w.emit('error', new Error('fail')); + }), 1); + }); - w.on('error', common.mustCall(function (er) {})); - w._write = function () {}; + _w.on('error', common.mustCall(function () {})); + _w._write = function () {}; - r.pipe(w); - // Removing some OTHER random listener should not do anything - w.removeListener('error', function () {}); - removed = true; - })(); + _r.pipe(_w); + // Removing some OTHER random listener should not do anything + _w.removeListener('error', function () {}); + _removed = true; } \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js index 2dc5cd9342..143fea0af1 100644 --- a/test/parallel/test-stream-pipe-unpipe-streams.js +++ b/test/parallel/test-stream-pipe-unpipe-streams.js @@ -29,7 +29,7 @@ source.unpipe(dest2); assert.strictEqual(source._readableState.pipes, dest1); assert.notStrictEqual(source._readableState.pipes, dest2); -dest2.on('unpipe', common.fail); +dest2.on('unpipe', common.mustNotCall()); source.unpipe(dest2); source.unpipe(dest1); diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js index 
092ed41f4d..0dae68fa93 100644 --- a/test/parallel/test-stream-push-strings.js +++ b/test/parallel/test-stream-push-strings.js @@ -35,7 +35,7 @@ MyStream.prototype._read = function (n) { var ms = new MyStream(); var results = []; ms.on('readable', function () { - var chunk; + var chunk = void 0; while (null !== (chunk = ms.read())) { results.push(chunk + ''); } @@ -43,7 +43,7 @@ ms.on('readable', function () { var expect = ['first chunksecond to last chunk', 'last chunk']; process.on('exit', function () { - assert.equal(ms._chunks, -1); + assert.strictEqual(ms._chunks, -1); assert.deepStrictEqual(results, expect); console.log('ok'); }); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js index f101f1ead6..b7adb1445b 100644 --- a/test/parallel/test-stream-readable-constructor-set-methods.js +++ b/test/parallel/test-stream-readable-constructor-set-methods.js @@ -16,6 +16,6 @@ var r = new Readable({ read: _read }); r.resume(); process.on('exit', function () { - assert.equal(r._read, _read); + assert.strictEqual(r._read, _read); assert(_readCalled); }); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js index 1560cefdeb..7f8b2f032b 100644 --- a/test/parallel/test-stream-readable-event.js +++ b/test/parallel/test-stream-readable-event.js @@ -7,66 +7,60 @@ var assert = require('assert/'); var Readable = require('../../').Readable; { - (function () { - // First test, not reading when the readable is added. - // make sure that on('readable', ...) triggers a readable event. - var r = new Readable({ - highWaterMark: 3 - }); + // First test, not reading when the readable is added. + // make sure that on('readable', ...) triggers a readable event. + var r = new Readable({ + highWaterMark: 3 + }); - r._read = common.fail; + r._read = common.mustNotCall(); - // This triggers a 'readable' event, which is lost. - r.push(bufferShim.from('blerg')); + // This triggers a 'readable' event, which is lost. + r.push(bufferShim.from('blerg')); - setTimeout(function () { - // we're testing what we think we are - assert(!r._readableState.reading); - r.on('readable', common.mustCall(function () {})); - }); - })(); + setTimeout(function () { + // we're testing what we think we are + assert(!r._readableState.reading); + r.on('readable', common.mustCall(function () {})); + }, 1); } { - (function () { - // second test, make sure that readable is re-emitted if there's - // already a length, while it IS reading. + // second test, make sure that readable is re-emitted if there's + // already a length, while it IS reading. - var r = new Readable({ - highWaterMark: 3 - }); + var _r = new Readable({ + highWaterMark: 3 + }); - r._read = common.mustCall(function (n) {}); + _r._read = common.mustCall(function (n) {}); - // This triggers a 'readable' event, which is lost. - r.push(bufferShim.from('bl')); + // This triggers a 'readable' event, which is lost. 
+ _r.push(bufferShim.from('bl')); - setTimeout(function () { - // assert we're testing what we think we are - assert(r._readableState.reading); - r.on('readable', common.mustCall(function () {})); - }); - })(); + setTimeout(function () { + // assert we're testing what we think we are + assert(_r._readableState.reading); + _r.on('readable', common.mustCall(function () {})); + }, 1); } { - (function () { - // Third test, not reading when the stream has not passed - // the highWaterMark but *has* reached EOF. - var r = new Readable({ - highWaterMark: 30 - }); + // Third test, not reading when the stream has not passed + // the highWaterMark but *has* reached EOF. + var _r2 = new Readable({ + highWaterMark: 30 + }); - r._read = common.fail; + _r2._read = common.mustNotCall(); - // This triggers a 'readable' event, which is lost. - r.push(bufferShim.from('blerg')); - r.push(null); + // This triggers a 'readable' event, which is lost. + _r2.push(bufferShim.from('blerg')); + _r2.push(null); - setTimeout(function () { - // assert we're testing what we think we are - assert(!r._readableState.reading); - r.on('readable', common.mustCall(function () {})); - }); - })(); + setTimeout(function () { + // assert we're testing what we think we are + assert(!_r2._readableState.reading); + _r2.on('readable', common.mustCall(function () {})); + }, 1); } \ No newline at end of file diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js index fca03cdbfa..d318d9e552 100644 --- a/test/parallel/test-stream-readable-flow-recursion.js +++ b/test/parallel/test-stream-readable-flow-recursion.js @@ -41,12 +41,12 @@ flow(stream, 5000, function () { }); process.on('exit', function (code) { - assert.equal(reads, 2); + assert.strictEqual(reads, 2); // we pushed up the high water mark - assert.equal(stream._readableState.highWaterMark, 8192); + assert.strictEqual(stream._readableState.highWaterMark, 8192); // length is 0 right now, because we pulled it all out. - assert.equal(stream._readableState.length, 0); + assert.strictEqual(stream._readableState.length, 0); assert(!code); - assert.equal(depth, 0); + assert.strictEqual(depth, 0); console.log('ok'); }); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js index 5eb68a49de..2e967975e6 100644 --- a/test/parallel/test-stream-readable-needReadable.js +++ b/test/parallel/test-stream-readable-needReadable.js @@ -76,7 +76,7 @@ var slowProducer = new Readable({ slowProducer.on('readable', common.mustCall(function () { if (slowProducer.read(8) === null) { - // The buffer doesn't have enough data, and the stream is not ened, + // The buffer doesn't have enough data, and the stream is not need, // we need to notify the reader when data arrives. assert.strictEqual(slowProducer._readableState.needReadable, true); } else { diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js index 6e2f0c3e5a..2943e9d197 100644 --- a/test/parallel/test-stream-readable-resumeScheduled.js +++ b/test/parallel/test-stream-readable-resumeScheduled.js @@ -12,71 +12,65 @@ var _require = require('../../'), Writable = _require.Writable; { - (function () { - // pipe() test case - var r = new Readable({ - read: function () {} - }); - var w = new Writable(); - - // resumeScheduled should start = `false`. 
- assert.strictEqual(r._readableState.resumeScheduled, false); + // pipe() test case + var r = new Readable({ + read: function () {} + }); + var w = new Writable(); + + // resumeScheduled should start = `false`. + assert.strictEqual(r._readableState.resumeScheduled, false); - // calling pipe() should change the state value = true. - r.pipe(w); - assert.strictEqual(r._readableState.resumeScheduled, true); + // calling pipe() should change the state value = true. + r.pipe(w); + assert.strictEqual(r._readableState.resumeScheduled, true); - process.nextTick(common.mustCall(function () { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - })(); + process.nextTick(common.mustCall(function () { + assert.strictEqual(r._readableState.resumeScheduled, false); + })); } { - (function () { - // 'data' listener test case - var r = new Readable({ - read: function () {} - }); + // 'data' listener test case + var _r = new Readable({ + read: function () {} + }); - // resumeScheduled should start = `false`. - assert.strictEqual(r._readableState.resumeScheduled, false); + // resumeScheduled should start = `false`. + assert.strictEqual(_r._readableState.resumeScheduled, false); - r.push(bufferShim.from([1, 2, 3])); + _r.push(bufferShim.from([1, 2, 3])); - // adding 'data' listener should change the state value - r.on('data', common.mustCall(function () { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - assert.strictEqual(r._readableState.resumeScheduled, true); + // adding 'data' listener should change the state value + _r.on('data', common.mustCall(function () { + assert.strictEqual(_r._readableState.resumeScheduled, false); + })); + assert.strictEqual(_r._readableState.resumeScheduled, true); - process.nextTick(common.mustCall(function () { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - })(); + process.nextTick(common.mustCall(function () { + assert.strictEqual(_r._readableState.resumeScheduled, false); + })); } { - (function () { - // resume() test case - var r = new Readable({ - read: function () {} - }); - - // resumeScheduled should start = `false`. - assert.strictEqual(r._readableState.resumeScheduled, false); - - // Calling resume() should change the state value. - r.resume(); - assert.strictEqual(r._readableState.resumeScheduled, true); - - r.on('resume', common.mustCall(function () { - // The state value should be `false` again - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - - process.nextTick(common.mustCall(function () { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - })(); + // resume() test case + var _r2 = new Readable({ + read: function () {} + }); + + // resumeScheduled should start = `false`. + assert.strictEqual(_r2._readableState.resumeScheduled, false); + + // Calling resume() should change the state value. 
+ _r2.resume(); + assert.strictEqual(_r2._readableState.resumeScheduled, true); + + _r2.on('resume', common.mustCall(function () { + // The state value should be `false` again + assert.strictEqual(_r2._readableState.resumeScheduled, false); + })); + + process.nextTick(common.mustCall(function () { + assert.strictEqual(_r2._readableState.resumeScheduled, false); + })); } \ No newline at end of file diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js index 108e7d4fe5..69bb6a2e42 100644 --- a/test/parallel/test-stream-transform-constructor-set-methods.js +++ b/test/parallel/test-stream-transform-constructor-set-methods.js @@ -23,12 +23,18 @@ var t = new Transform({ flush: _flush }); +var t2 = new Transform({}); + t.end(bufferShim.from('blerg')); t.resume(); +assert.throws(function () { + t2.end(bufferShim.from('blerg')); +}, /^Error: _transform\(\) is not implemented$/); + process.on('exit', function () { - assert.equal(t._transform, _transform); - assert.equal(t._flush, _flush); - assert(_transformCalled); - assert(_flushCalled); + assert.strictEqual(t._transform, _transform); + assert.strictEqual(t._flush, _flush); + assert.strictEqual(_transformCalled, true); + assert.strictEqual(_flushCalled, true); }); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js index 70e5d46ced..4fdfb2f6ea 100644 --- a/test/parallel/test-stream-transform-objectmode-falsey-value.js +++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js @@ -1,7 +1,7 @@ /**/ var bufferShim = require('buffer-shims'); /**/ -require('../common'); +var common = require('../common'); var assert = require('assert/'); var stream = require('../../'); @@ -13,23 +13,20 @@ var dest = new PassThrough({ objectMode: true }); var expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; var results = []; -process.on('exit', function () { - assert.deepStrictEqual(results, expect); - console.log('ok'); -}); -dest.on('data', function (x) { +dest.on('data', common.mustCall(function (x) { results.push(x); -}); +}, expect.length)); src.pipe(tx).pipe(dest); var i = -1; -var int = setInterval(function () { - if (i > 10) { +var int = setInterval(common.mustCall(function () { + if (results.length === expect.length) { src.end(); clearInterval(int); + assert.deepStrictEqual(results, expect); } else { src.write(i++); } -}); \ No newline at end of file +}, expect.length + 1), 1); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js index 161c166874..ea92e385b3 100644 --- a/test/parallel/test-stream-transform-split-objectmode.js +++ b/test/parallel/test-stream-transform-split-objectmode.js @@ -17,7 +17,7 @@ parser._transform = function (chunk, enc, callback) { callback(null, { val: chunk[0] }); }; -var parsed; +var parsed = void 0; parser.on('data', function (obj) { parsed = obj; @@ -40,7 +40,7 @@ serializer._transform = function (obj, _, callback) { callback(null, bufferShim.from([obj.val])); }; -var serialized; +var serialized = void 0; serializer.on('data', function (chunk) { serialized = chunk; diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js index 7a9d8b3359..0695193125 100644 --- a/test/parallel/test-stream-unshift-empty-chunk.js +++ b/test/parallel/test-stream-unshift-empty-chunk.js 
@@ -21,7 +21,7 @@ r._read = function (n) { var readAll = false; var seen = []; r.on('readable', function () { - var chunk; + var chunk = void 0; while (chunk = r.read()) { seen.push(chunk.toString()); // simulate only reading a certain amount of the data, diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js index fe0fc842b5..e6efd962c8 100644 --- a/test/parallel/test-stream-unshift-read-race.js +++ b/test/parallel/test-stream-unshift-read-race.js @@ -1,7 +1,7 @@ /**/ var bufferShim = require('buffer-shims'); /**/ -require('../common'); +var common = require('../common'); var assert = require('assert/'); // This test verifies that: @@ -43,7 +43,7 @@ r._read = function (n) { pos += n; r.push(c); if (c === null) pushError(); - }); + }, 1); } } }; @@ -51,7 +51,7 @@ r._read = function (n) { function pushError() { assert.throws(function () { r.push(bufferShim.allocUnsafe(1)); - }); + }, /^Error: stream.push\(\) after EOF$/); } var w = stream.Writable(); @@ -61,39 +61,34 @@ w._write = function (chunk, encoding, cb) { cb(); }; -var ended = false; -r.on('end', function () { - assert(!ended, 'end emitted more than once'); +r.on('end', common.mustCall(function () { assert.throws(function () { r.unshift(bufferShim.allocUnsafe(1)); - }); - ended = true; + }, /^Error: stream.unshift\(\) after end event$/); w.end(); -}); +})); r.on('readable', function () { - var chunk; + var chunk = void 0; while (null !== (chunk = r.read(10))) { w.write(chunk); if (chunk.length > 4) r.unshift(bufferShim.from('1234')); } }); -var finished = false; -w.on('finish', function () { - finished = true; +w.on('finish', common.mustCall(function () { // each chunk should start with 1234, and then be asfdasdfasdf... // The first got pulled out before the first unshift('1234'), so it's // lacking that piece. 
- assert.equal(written[0], 'asdfasdfas'); + assert.strictEqual(written[0], 'asdfasdfas'); var asdf = 'd'; console.error('0: %s', written[0]); - for (var i = 1; i < written.length; i++) { - console.error('%s: %s', i.toString(32), written[i]); - assert.equal(written[i].slice(0, 4), '1234'); - for (var j = 4; j < written[i].length; j++) { - var c = written[i].charAt(j); - assert.equal(c, asdf); + for (var _i = 1; _i < written.length; _i++) { + console.error('%s: %s', _i.toString(32), written[_i]); + assert.strictEqual(written[_i].slice(0, 4), '1234'); + for (var j = 4; j < written[_i].length; j++) { + var _c = written[_i].charAt(j); + assert.strictEqual(_c, asdf); switch (asdf) { case 'a': asdf = 's';break; @@ -106,11 +101,9 @@ w.on('finish', function () { } } } -}); +})); process.on('exit', function () { - assert.equal(written.length, 18); - assert(ended, 'stream ended'); - assert(finished, 'stream finished'); + assert.strictEqual(written.length, 18); console.log('ok'); }); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js index c7852015d8..e4d4f2e43b 100644 --- a/test/parallel/test-stream-writable-change-default-encoding.js +++ b/test/parallel/test-stream-writable-change-default-encoding.js @@ -21,7 +21,7 @@ MyWritable.prototype._write = function (chunk, encoding, callback) { (function defaultCondingIsUtf8() { var m = new MyWritable(function (isBuffer, type, enc) { - assert.equal(enc, 'utf8'); + assert.strictEqual(enc, 'utf8'); }, { decodeStrings: false }); m.write('foo'); m.end(); @@ -29,7 +29,7 @@ MyWritable.prototype._write = function (chunk, encoding, callback) { (function changeDefaultEncodingToAscii() { var m = new MyWritable(function (isBuffer, type, enc) { - assert.equal(enc, 'ascii'); + assert.strictEqual(enc, 'ascii'); }, { decodeStrings: false }); m.setDefaultEncoding('ascii'); m.write('bar'); @@ -45,7 +45,7 @@ assert.throws(function changeDefaultEncodingToInvalidValue() { (function checkVairableCaseEncoding() { var m = new MyWritable(function (isBuffer, type, enc) { - assert.equal(enc, 'ascii'); + assert.strictEqual(enc, 'ascii'); }, { decodeStrings: false }); m.setDefaultEncoding('AsCii'); m.write('bar'); diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js index 48be0b8e04..60a24e6d50 100644 --- a/test/parallel/test-stream-writable-constructor-set-methods.js +++ b/test/parallel/test-stream-writable-constructor-set-methods.js @@ -29,9 +29,9 @@ w2.write(bufferShim.from('blerg')); w2.end(); process.on('exit', function () { - assert.equal(w._write, _write); + assert.strictEqual(w._write, _write); assert(_writeCalled); - assert.equal(w2._writev, _writev); - assert.equal(dLength, 2); + assert.strictEqual(w2._writev, _writev); + assert.strictEqual(dLength, 2); assert(_writevCalled); }); \ No newline at end of file diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js new file mode 100644 index 0000000000..2be5e8abab --- /dev/null +++ b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js @@ -0,0 +1,59 @@ +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +var common = require('../common'); +var assert = require('assert/'); +var stream = require('../../'); + +var writable = new stream.Writable(); + +writable._writev = common.mustCall(function (chunks, cb) 
{ + assert.strictEqual(chunks.length, 2, 'two chunks to write'); + cb(); +}, 1); + +writable._write = common.mustCall(function (chunk, encoding, cb) { + cb(); +}, 1); + +// first cork +writable.cork(); +assert.strictEqual(writable._writableState.corked, 1); +assert.strictEqual(writable._writableState.bufferedRequestCount, 0); + +// cork again +writable.cork(); +assert.strictEqual(writable._writableState.corked, 2); + +// the first chunk is buffered +writable.write('first chunk'); +assert.strictEqual(writable._writableState.bufferedRequestCount, 1); + +// first uncork does nothing +writable.uncork(); +assert.strictEqual(writable._writableState.corked, 1); +assert.strictEqual(writable._writableState.bufferedRequestCount, 1); + +process.nextTick(uncork); + +// the second chunk is buffered, because we uncork at the end of tick +writable.write('second chunk'); +assert.strictEqual(writable._writableState.corked, 1); +assert.strictEqual(writable._writableState.bufferedRequestCount, 2); + +function uncork() { + // second uncork flushes the buffer + writable.uncork(); + assert.strictEqual(writable._writableState.corked, 0); + assert.strictEqual(writable._writableState.bufferedRequestCount, 0); + + // verify that end() uncorks correctly + writable.cork(); + writable.write('third chunk'); + writable.end(); + + // end causes an uncork() as well + assert.strictEqual(writable._writableState.corked, 0); + assert.strictEqual(writable._writableState.bufferedRequestCount, 0); +} \ No newline at end of file diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js index 2088479189..437f8385dd 100644 --- a/test/parallel/test-stream-writev.js +++ b/test/parallel/test-stream-writev.js @@ -1,7 +1,7 @@ /**/ var bufferShim = require('buffer-shims'); /**/ -require('../common'); +var common = require('../common'); var assert = require('assert/'); var stream = require('../../'); @@ -30,16 +30,14 @@ function test(decode, uncork, multi, next) { expectCount++; var expect = expectCount; return function (er) { - if (er) throw er; + assert.ifError(er); counter++; - assert.equal(counter, expect); + assert.strictEqual(counter, expect); }; } var w = new stream.Writable({ decodeStrings: decode }); - w._write = function (chunk, e, cb) { - assert(false, 'Should not call _write'); - }; + w._write = common.mustNotCall('Should not call _write'); var expectChunks = decode ? [{ encoding: 'buffer', chunk: [104, 101, 108, 108, 111, 44, 32] }, { encoding: 'buffer', @@ -48,7 +46,7 @@ function test(decode, uncork, multi, next) { chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, { encoding: 'buffer', chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] }] : [{ encoding: 'ascii', chunk: 'hello, ' }, { encoding: 'utf8', chunk: 'world' }, { encoding: 'buffer', chunk: [33] }, { encoding: 'binary', chunk: '\nand then...' 
}, { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }]; - var actualChunks; + var actualChunks = void 0; w._writev = function (chunks, cb) { actualChunks = chunks.map(function (chunk) { return { diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js index 94f89c9c79..4e2284d3a2 100644 --- a/test/parallel/test-stream2-base64-single-char-read-end.js +++ b/test/parallel/test-stream2-base64-single-char-read-end.js @@ -10,7 +10,6 @@ var src = new R({ encoding: 'base64' }); var dst = new W(); var hasRead = false; var accum = []; -var timeout; src._read = function (n) { if (!hasRead) { @@ -28,12 +27,12 @@ dst._write = function (chunk, enc, cb) { }; src.on('end', function () { - assert.equal(Buffer.concat(accum) + '', 'MQ=='); + assert.strictEqual(Buffer.concat(accum) + '', 'MQ=='); clearTimeout(timeout); }); src.pipe(dst); -timeout = setTimeout(function () { +var timeout = setTimeout(function () { common.fail('timed out waiting for _write'); }, 100); \ No newline at end of file diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js index 41c41c5510..338a8cdd93 100644 --- a/test/parallel/test-stream2-compatibility.js +++ b/test/parallel/test-stream2-compatibility.js @@ -28,7 +28,7 @@ TestReader.prototype._read = function (n) { var reader = new TestReader(); setImmediate(function () { - assert.equal(ondataCalled, 1); + assert.strictEqual(ondataCalled, 1); console.log('ok'); reader.push(null); }); diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js index d72c18bc8e..9e152bac33 100644 --- a/test/parallel/test-stream2-large-read-stall.js +++ b/test/parallel/test-stream2-large-read-stall.js @@ -22,16 +22,19 @@ r._read = push; r.on('readable', function () { ;false && console.error('>> readable'); + var ret = void 0; do { ;false && console.error(' > read(%d)', READSIZE); - var ret = r.read(READSIZE); + ret = r.read(READSIZE); ;false && console.error(' < %j (%d remain)', ret && ret.length, rs.length); } while (ret && ret.length === READSIZE); ;false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length); }); -r.on('end', common.mustCall(function () {})); +r.on('end', common.mustCall(function () { + assert.strictEqual(pushes, PUSHCOUNT + 1); +})); var pushes = 0; function push() { @@ -43,9 +46,5 @@ function push() { } ;false && console.error(' push #%d', pushes); - if (r.push(bufferShim.allocUnsafe(PUSHSIZE))) setTimeout(push); -} - -process.on('exit', function () { - assert.equal(pushes, PUSHCOUNT + 1); -}); \ No newline at end of file + if (r.push(bufferShim.allocUnsafe(PUSHSIZE))) setTimeout(push, 1); +} \ No newline at end of file diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js index 48bc585dc6..622afcf819 100644 --- a/test/parallel/test-stream2-objects.js +++ b/test/parallel/test-stream2-objects.js @@ -24,7 +24,7 @@ function run() { console.log('# %s', name); fn({ same: assert.deepStrictEqual, - equal: assert.equal, + equal: assert.strictEqual, end: function () { count--; run(); diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js index 993b650313..e04282b9bf 100644 --- a/test/parallel/test-stream2-pipe-error-handling.js +++ b/test/parallel/test-stream2-pipe-error-handling.js @@ -6,86 +6,82 @@ var assert = require('assert/'); var stream = require('../../'); { - (function () { - var count = 
1000; - - var source = new stream.Readable(); - source._read = function (n) { - n = Math.min(count, n); - count -= n; - source.push(bufferShim.allocUnsafe(n)); - }; - - var unpipedDest = void 0; - source.unpipe = function (dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - var dest = new stream.Writable(); - dest._write = function (chunk, encoding, cb) { - cb(); - }; - - source.pipe(dest); - - var gotErr = null; - dest.on('error', function (err) { - gotErr = err; - }); - - var unpipedSource = void 0; - dest.on('unpipe', function (src) { - unpipedSource = src; - }); - - var err = new Error('This stream turned into bacon.'); - dest.emit('error', err); - assert.strictEqual(gotErr, err); - assert.strictEqual(unpipedSource, source); - assert.strictEqual(unpipedDest, dest); - })(); + var count = 1000; + + var source = new stream.Readable(); + source._read = function (n) { + n = Math.min(count, n); + count -= n; + source.push(bufferShim.allocUnsafe(n)); + }; + + var unpipedDest = void 0; + source.unpipe = function (dest) { + unpipedDest = dest; + stream.Readable.prototype.unpipe.call(this, dest); + }; + + var dest = new stream.Writable(); + dest._write = function (chunk, encoding, cb) { + cb(); + }; + + source.pipe(dest); + + var gotErr = null; + dest.on('error', function (err) { + gotErr = err; + }); + + var unpipedSource = void 0; + dest.on('unpipe', function (src) { + unpipedSource = src; + }); + + var err = new Error('This stream turned into bacon.'); + dest.emit('error', err); + assert.strictEqual(gotErr, err); + assert.strictEqual(unpipedSource, source); + assert.strictEqual(unpipedDest, dest); } { - (function () { - var count = 1000; - - var source = new stream.Readable(); - source._read = function (n) { - n = Math.min(count, n); - count -= n; - source.push(bufferShim.allocUnsafe(n)); - }; - - var unpipedDest = void 0; - source.unpipe = function (dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - var dest = new stream.Writable(); - dest._write = function (chunk, encoding, cb) { - cb(); - }; - - source.pipe(dest); - - var unpipedSource = void 0; - dest.on('unpipe', function (src) { - unpipedSource = src; - }); - - var err = new Error('This stream turned into bacon.'); - - var gotErr = null; - try { - dest.emit('error', err); - } catch (e) { - gotErr = e; - } - assert.strictEqual(gotErr, err); - assert.strictEqual(unpipedSource, source); - assert.strictEqual(unpipedDest, dest); - })(); + var _count = 1000; + + var _source = new stream.Readable(); + _source._read = function (n) { + n = Math.min(_count, n); + _count -= n; + _source.push(bufferShim.allocUnsafe(n)); + }; + + var _unpipedDest = void 0; + _source.unpipe = function (dest) { + _unpipedDest = dest; + stream.Readable.prototype.unpipe.call(this, dest); + }; + + var _dest = new stream.Writable(); + _dest._write = function (chunk, encoding, cb) { + cb(); + }; + + _source.pipe(_dest); + + var _unpipedSource = void 0; + _dest.on('unpipe', function (src) { + _unpipedSource = src; + }); + + var _err = new Error('This stream turned into bacon.'); + + var _gotErr = null; + try { + _dest.emit('error', _err); + } catch (e) { + _gotErr = e; + } + assert.strictEqual(_gotErr, _err); + assert.strictEqual(_unpipedSource, _source); + assert.strictEqual(_unpipedDest, _dest); } \ No newline at end of file diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js index aa276258c3..9c0faaa6b8 100644 --- a/test/parallel/test-stream2-push.js +++ 
b/test/parallel/test-stream2-push.js @@ -2,16 +2,15 @@ var bufferShim = require('buffer-shims'); /**/ require('../common'); -var stream = require('../../'); -var Readable = stream.Readable; -var Writable = stream.Writable; +var Readable = require('../../').Readable; +var Writable = require('../../').Writable; var assert = require('assert/'); var EE = require('events').EventEmitter; // a mock thing a bit like the net.Socket/tcp_wrap.handle interaction -stream = new Readable({ +var stream = new Readable({ highWaterMark: 16, encoding: 'utf8' }); diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js index a940eef012..35b7f92d82 100644 --- a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js +++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js @@ -75,13 +75,13 @@ function test2() { var r = new Readable({ encoding: 'base64' }); var reads = 5; r._read = function (n) { - if (! reads--) return r.push(null); // EOF + if (!reads--) return r.push(null); // EOF else return r.push(bufferShim.from('x')); }; var results = []; function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = r.read())) { results.push(chunk + ''); } diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js index 5ee6841561..d91e810653 100644 --- a/test/parallel/test-stream2-readable-from-list.js +++ b/test/parallel/test-stream2-readable-from-list.js @@ -25,7 +25,7 @@ function run() { console.log('# %s', name); fn({ same: assert.deepStrictEqual, - equal: assert.equal, + equal: assert.strictEqual, end: function () { count--; run(); @@ -42,7 +42,7 @@ function bufferListFromArray(arr) { // ensure all tests have run process.on('exit', function () { - assert.equal(count, 0); + assert.strictEqual(count, 0); }); process.nextTick(run); diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js index 50e1380485..582e83acf8 100644 --- a/test/parallel/test-stream2-readable-non-empty-end.js +++ b/test/parallel/test-stream2-readable-non-empty-end.js @@ -18,7 +18,7 @@ test._read = function (size) { var chunk = chunks[n++]; setTimeout(function () { test.push(chunk === undefined ? 
null : chunk); - }); + }, 1); }; test.on('end', thrower); @@ -33,7 +33,7 @@ test.on('readable', function () { if (res) { bytesread += res.length; console.error('br=%d len=%d', bytesread, len); - setTimeout(next); + setTimeout(next, 1); } test.read(0); }); @@ -47,7 +47,7 @@ function next() { // one to get the last byte var r = test.read(); assert(r); - assert.equal(r.length, 1); + assert.strictEqual(r.length, 1); r = test.read(); - assert.equal(r, null); + assert.strictEqual(r, null); } \ No newline at end of file diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js index e93541ef28..eb337a1f38 100644 --- a/test/parallel/test-stream2-set-encoding.js +++ b/test/parallel/test-stream2-set-encoding.js @@ -24,7 +24,7 @@ function run() { console.log('# %s', name); fn({ same: assert.deepStrictEqual, - equal: assert.equal, + equal: assert.strictEqual, end: function () { count--; run(); @@ -34,7 +34,7 @@ function run() { // ensure all tests have run process.on('exit', function () { - assert.equal(count, 0); + assert.strictEqual(count, 0); }); process.nextTick(run); @@ -82,7 +82,7 @@ test('setEncoding utf8', function (t) { var expect = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(10))) { out.push(chunk); } @@ -101,7 +101,7 @@ test('setEncoding hex', function (t) { var expect = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(10))) { out.push(chunk); } @@ -121,7 +121,7 @@ test('setEncoding hex with read(13)', function (t) { tr.on('readable', function flow() { console.log('readable once'); - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(13))) { out.push(chunk); } @@ -141,7 +141,7 @@ test('setEncoding base64', function (t) { var expect = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ==']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(10))) { out.push(chunk); } @@ -159,7 +159,7 @@ test('encoding: utf8', function (t) { var expect = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(10))) { out.push(chunk); } @@ -177,7 +177,7 @@ test('encoding: hex', function (t) { var expect = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(10))) { out.push(chunk); } @@ -195,7 +195,7 @@ test('encoding: hex with read(13)', function (t) { var expect = ['6161616161616', '1616161616161', '6161616161616', 
'1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(13))) { out.push(chunk); } @@ -213,7 +213,7 @@ test('encoding: base64', function (t) { var expect = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ==']; tr.on('readable', function flow() { - var chunk; + var chunk = void 0; while (null !== (chunk = tr.read(10))) { out.push(chunk); } diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js index 7c158277c3..27206e8997 100644 --- a/test/parallel/test-stream2-transform.js +++ b/test/parallel/test-stream2-transform.js @@ -24,7 +24,7 @@ function run() { console.log('# %s', name); fn({ same: assert.deepStrictEqual, - equal: assert.equal, + equal: assert.strictEqual, ok: assert, end: function () { count--; @@ -35,7 +35,7 @@ function run() { // ensure all tests have run process.on('exit', function () { - assert.equal(count, 0); + assert.strictEqual(count, 0); }); process.nextTick(run); diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js index 02b96af963..1bef908137 100644 --- a/test/parallel/test-stream2-unpipe-drain.js +++ b/test/parallel/test-stream2-unpipe-drain.js @@ -2,16 +2,10 @@ /**/ var bufferShim = require('buffer-shims'); /**/ - var common = require('../common'); + require('../common'); var assert = require('assert/'); - var stream = require('../../'); - - if (!common.hasCrypto) { - common.skip('missing crypto'); - return; - } - var crypto = require('crypto'); + var stream = require('../../'); var util = require('util'); function TestWriter() { @@ -34,7 +28,7 @@ TestReader.prototype._read = function (size) { this.reads += 1; - this.push(crypto.randomBytes(size)); + this.push(bufferShim.alloc(size)); }; var src1 = new TestReader(); @@ -57,7 +51,7 @@ }); process.on('exit', function () { - assert.equal(src1.reads, 2); - assert.equal(src2.reads, 2); + assert.strictEqual(src1.reads, 2); + assert.strictEqual(src2.reads, 2); }); })(); \ No newline at end of file diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js index 8811cd945e..1b670bd485 100644 --- a/test/parallel/test-stream2-unpipe-leak.js +++ b/test/parallel/test-stream2-unpipe-leak.js @@ -38,14 +38,14 @@ for (var i = 0; i < 10; i++) { src.unpipe(dest); } -assert.equal(src.listeners('end').length, 0); -assert.equal(src.listeners('readable').length, 0); - -assert.equal(dest.listeners('unpipe').length, 0); -assert.equal(dest.listeners('drain').length, 0); -assert.equal(dest.listeners('error').length, 0); -assert.equal(dest.listeners('close').length, 0); -assert.equal(dest.listeners('finish').length, 0); +assert.strictEqual(src.listeners('end').length, 0); +assert.strictEqual(src.listeners('readable').length, 0); + +assert.strictEqual(dest.listeners('unpipe').length, 0); +assert.strictEqual(dest.listeners('drain').length, 0); +assert.strictEqual(dest.listeners('error').length, 0); +assert.strictEqual(dest.listeners('close').length, 0); +assert.strictEqual(dest.listeners('finish').length, 0); console.error(src._readableState); process.on('exit', function () { diff --git a/test/parallel/test-stream2-writable.js 
b/test/parallel/test-stream2-writable.js index cdf2433709..dde986cb3d 100644 --- a/test/parallel/test-stream2-writable.js +++ b/test/parallel/test-stream2-writable.js @@ -26,7 +26,7 @@ TestWriter.prototype._write = function (chunk, encoding, cb) { var chunks = new Array(50); for (var i = 0; i < chunks.length; i++) { - chunks[i] = new Array(i + 1).join('x'); + chunks[i] = 'x'.repeat(i); } // tiny node-tap lookalike. @@ -47,7 +47,7 @@ function run() { console.log('# %s', name); fn({ same: assert.deepStrictEqual, - equal: assert.equal, + equal: assert.strictEqual, end: function () { count--; run(); @@ -57,7 +57,7 @@ function run() { // ensure all tests have run process.on('exit', function () { - assert.equal(count, 0); + assert.strictEqual(count, 0); }); process.nextTick(run); @@ -115,8 +115,9 @@ test('write backpressure', function (t) { var i = 0; (function W() { + var ret = void 0; do { - var ret = tw.write(chunks[i++]); + ret = tw.write(chunks[i++]); } while (ret !== false && i < chunks.length); if (i < chunks.length) { @@ -175,7 +176,7 @@ test('write no bufferize', function (t) { test('write callbacks', function (t) { var callbacks = chunks.map(function (chunk, i) { - return [i, function (er) { + return [i, function () { callbacks._called[i] = chunk; }]; }).reduce(function (set, x) { @@ -246,7 +247,7 @@ test('end callback called after write callback', function (t) { test('encoding should be ignored for buffers', function (t) { var tw = new W(); var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; - tw._write = function (chunk, encoding, cb) { + tw._write = function (chunk) { t.equal(chunk.toString('hex'), hex); t.end(); }; @@ -258,7 +259,7 @@ test('writables are not pipable', function (t) { var w = new W(); w._write = function () {}; var gotError = false; - w.on('error', function (er) { + w.on('error', function () { gotError = true; }); w.pipe(process.stdout); @@ -271,7 +272,7 @@ test('duplexes are pipable', function (t) { d._read = function () {}; d._write = function () {}; var gotError = false; - d.on('error', function (er) { + d.on('error', function () { gotError = true; }); d.pipe(process.stdout); @@ -305,7 +306,7 @@ test('dont end while writing', function (t) { setTimeout(function () { this.writing = false; cb(); - }); + }, 1); }; w.on('finish', function () { assert(wrote); @@ -342,7 +343,7 @@ test('finish does not come before sync _write cb', function (t) { assert(writeCb); t.end(); }); - w.write(bufferShim.alloc(0), function (er) { + w.write(bufferShim.alloc(0), function () { writeCb = true; }); w.end(); diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js index a635c77936..00fabfb7e5 100644 --- a/test/parallel/test-stream3-cork-end.js +++ b/test/parallel/test-stream3-cork-end.js @@ -24,7 +24,7 @@ w._write = function (chunk, encoding, cb) { // stream end event is not seen before the last write assert.ok(!seenEnd); // default encoding given none was specified - assert.equal(encoding, 'buffer'); + assert.strictEqual(encoding, 'buffer'); seenChunks.push(chunk); cb(); @@ -36,7 +36,7 @@ w.on('finish', function () { function writeChunks(remainingChunks, callback) { var writeChunk = remainingChunks.shift(); - var writeState; + var writeState = void 0; if (writeChunk) { setImmediate(function () { @@ -54,7 +54,7 @@ function writeChunks(remainingChunks, callback) { // do an initial write w.write('stuff'); // the write was immediate -assert.equal(seenChunks.length, 1); +assert.strictEqual(seenChunks.length, 1); // reset the seen chunks seenChunks = []; @@ 
-64,7 +64,7 @@ w.cork(); // write the bufferedChunks writeChunks(inputChunks, function () { // should not have seen anything yet - assert.equal(seenChunks.length, 0); + assert.strictEqual(seenChunks.length, 0); // trigger flush and ending the stream w.end(); @@ -73,7 +73,7 @@ writeChunks(inputChunks, function () { assert.ok(!seenEnd); // buffered bytes should be seen in current tick - assert.equal(seenChunks.length, 4); + assert.strictEqual(seenChunks.length, 4); // did the chunks match for (var i = 0, l = expectedChunks.length; i < l; i++) { diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js index ce74211523..e6b1401f9f 100644 --- a/test/parallel/test-stream3-cork-uncork.js +++ b/test/parallel/test-stream3-cork-uncork.js @@ -22,7 +22,7 @@ var w = new Writable(); // lets arrange to store the chunks w._write = function (chunk, encoding, cb) { // default encoding given none was specified - assert.equal(encoding, 'buffer'); + assert.strictEqual(encoding, 'buffer'); seenChunks.push(chunk); cb(); @@ -34,7 +34,7 @@ w.on('finish', function () { function writeChunks(remainingChunks, callback) { var writeChunk = remainingChunks.shift(); - var writeState; + var writeState = void 0; if (writeChunk) { setImmediate(function () { @@ -52,7 +52,7 @@ function writeChunks(remainingChunks, callback) { // do an initial write w.write('stuff'); // the write was immediate -assert.equal(seenChunks.length, 1); +assert.strictEqual(seenChunks.length, 1); // reset the chunks seen so far seenChunks = []; @@ -62,13 +62,13 @@ w.cork(); // write the bufferedChunks writeChunks(inputChunks, function () { // should not have seen anything yet - assert.equal(seenChunks.length, 0); + assert.strictEqual(seenChunks.length, 0); // trigger writing out the buffer w.uncork(); // buffered bytes shoud be seen in current tick - assert.equal(seenChunks.length, 4); + assert.strictEqual(seenChunks.length, 4); // did the chunks match for (var i = 0, l = expectedChunks.length; i < l; i++) { diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js index 8d54bed54e..348cce68ea 100644 --- a/test/parallel/test-stream3-pause-then-read.js +++ b/test/parallel/test-stream3-pause-then-read.js @@ -41,7 +41,7 @@ function readn(n, then) { (function read() { var c = r.read(n); if (!c) r.once('readable', read);else { - assert.equal(c.length, n); + assert.strictEqual(c.length, n); assert(!r._readableState.flowing); then(); } @@ -80,7 +80,7 @@ function pipeLittle() { var w = new Writable(); var written = 0; w.on('finish', function () { - assert.equal(written, 200); + assert.strictEqual(written, 200); setImmediate(read1234); }); w._write = function (chunk, encoding, cb) { @@ -132,8 +132,8 @@ function pipe() { }; w.on('finish', function () { console.error('written', written, totalPushed); - assert.equal(written, expectEndingData); - assert.equal(totalPushed, expectTotalData); + assert.strictEqual(written, expectEndingData); + assert.strictEqual(totalPushed, expectTotalData); console.log('ok'); }); r.pipe(w);
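
A note on the assert.equal -> assert.strictEqual substitutions that run through all of these test diffs: assert.equal compares with ==, so values that merely coerce to each other pass, while assert.strictEqual requires ===. A minimal standalone example (not part of the patch):

    var assert = require('assert');

    // Loose comparison: '1' == 1, so this passes silently.
    assert.equal('1', 1);

    // Strict comparison: '1' !== 1, so this throws an AssertionError.
    assert.throws(function () {
      assert.strictEqual('1', 1);
    }, assert.AssertionError);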
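
The leading-underscore names in the second half of test-stream2-pipe-error-handling.js (_count, _source, _dest, _gotErr, ...) are generated, not hand-written: when two sibling { ... } blocks declare the same let/const names, the ES5 block-scoping transform used to produce these files hoists them to var and renames the second set to avoid collisions, which is also why the old IIFE wrappers could be dropped. A rough standalone sketch, assuming the upstream test declares the same names in two plain blocks:

    // Upstream shape (sketch):
    //   { const count = 1000; /* ... */ }
    //   { const count = 1000; /* ... */ }
    //
    // After transpilation both bindings become function-scoped vars,
    // so the second one is renamed with a leading underscore.
    var count = 1;
    console.log(count);   // 1

    var _count = 2;       // renamed copy of the second block's `count`
    console.log(_count);  // 2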
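
test-stream2-large-read-stall.js now asserts the final push count inside common.mustCall(...) on 'end' rather than in a process.on('exit') listener, so the test fails loudly if 'end' never fires instead of silently skipping the assertion. A stripped-down sketch of the mustCall idea (hypothetical; the helper in the project's test/common.js is the real implementation):

    // Minimal mustCall sketch: wrap a callback and verify at exit that
    // it was invoked the expected number of times.
    function mustCall(fn, expected) {
      expected = expected === undefined ? 1 : expected;
      var actual = 0;
      process.on('exit', function () {
        if (actual !== expected) {
          console.error('mustCall: expected %d call(s), got %d', expected, actual);
          process.exitCode = 1;
        }
      });
      return function () {
        actual++;
        return fn.apply(this, arguments);
      };
    }

    // Usage mirroring the diff above:
    // r.on('end', mustCall(function () {
    //   assert.strictEqual(pushes, PUSHCOUNT + 1);
    // }));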