diff --git a/.airtap.yml b/.airtap.yml
deleted file mode 100644
index fe0435ff1e..0000000000
--- a/.airtap.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-providers:
- - airtap-sauce
-
-browsers:
- - name: chrome
- - name: ie
- - name: firefox
- - name: safari
- - name: edge
-
-presets:
- local:
- providers:
- - airtap-playwright
- browsers:
- - name: chromium
- - name: firefox
- - name: webkit
diff --git a/.babelrc b/.babelrc
deleted file mode 100644
index 465861f3d1..0000000000
--- a/.babelrc
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "presets": [
- ["@babel/preset-env", {
- "targets": [
- "last 2 versions",
- "not dead",
- "node 6.0"
- ],
- "modules": "commonjs",
- "exclude": [
- "transform-regenerator",
- "transform-typeof-symbol"
- ],
- "debug": true
- }]
- ]
-}
diff --git a/.eslintrc.js b/.eslintrc.js
new file mode 100644
index 0000000000..76499b9ad6
--- /dev/null
+++ b/.eslintrc.js
@@ -0,0 +1,23 @@
+module.exports = {
+ parserOptions: {
+ ecmaVersion: 'latest'
+ },
+ extends: ['standard'],
+ rules: {
+ /*
+ This is inserted to make this compatible with prettier.
+ Once https://github.com/prettier/prettier/issues/3845 and https://github.com/prettier/prettier/issues/3847 are solved this might be not needed any more.
+ */
+ 'space-before-function-paren': 0,
+ curly: [2, 'all']
+ },
+ overrides: [
+ {
+ files: ['**/*.mjs'],
+ parserOptions: {
+ ecmaVersion: 'latest',
+ sourceType: 'module'
+ }
+ }
+ ]
+}
diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml
new file mode 100644
index 0000000000..ac2b9e6343
--- /dev/null
+++ b/.github/workflows/browsers.yml
@@ -0,0 +1,39 @@
+name: Browsers
+
+on: [push, pull_request]
+
+jobs:
+ build:
+ name: Browsers
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: ['ubuntu-latest', 'windows-latest', 'macos-latest']
+ browser: ['chrome', 'firefox', 'safari', 'edge']
+ bundler: ['browserify', 'esbuild', 'rollup', 'webpack']
+ exclude:
+ - os: ubuntu-latest
+ browser: safari
+ - os: windows-latest
+ browser: safari
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js 16
+ uses: actions/setup-node@v3
+ with:
+ node-version: 16
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ matrix.os }}-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Install Browser
+ run: ./node_modules/.bin/playwright install ${{ fromJSON('{"chrome":"chromium","edge":"msedge","firefox":"firefox","safari":"webkit"}')[matrix.browser] }}
+ - name: Bundle code
+ run: npm run test:prepare ${{ matrix.bundler }}
+ - name: Run Tests on Browsers
+ run: npm run test:browsers ${{ matrix.browser }} ${{ matrix.bundler }}
diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml
new file mode 100644
index 0000000000..49f437082b
--- /dev/null
+++ b/.github/workflows/bundlers.yml
@@ -0,0 +1,32 @@
+name: Bundlers
+
+on: [push, pull_request]
+
+jobs:
+ build:
+ name: Bundlers
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest, windows-latest, macos-latest]
+ node-version: [12.x, 14.x, 16.x, 18.x]
+ bundler: ['browserify', 'esbuild', 'rollup', 'webpack']
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ matrix.os }}-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Bundle code
+ run: npm run test:prepare ${{ matrix.bundler }}
+ - name: Run Tests on Browsers
+ run: npm run test:bundlers ${{ matrix.bundler }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index f49edcaecf..0000000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Node.js
-
-on: [push, pull_request]
-
-jobs:
- build:
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- os: [ubuntu-latest, windows-latest, macOS-latest]
- node-version: [6.x, 8.x, 10.x, 12.x, 14.x]
- steps:
- - uses: actions/checkout@v1
- - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }}
- uses: actions/setup-node@v1
- with:
- node-version: ${{ matrix.node-version }}
- - name: npm install
- run: npm install
- - name: npm run test
- run: npm run test
diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml
new file mode 100644
index 0000000000..5af0dab0eb
--- /dev/null
+++ b/.github/workflows/node.yml
@@ -0,0 +1,29 @@
+name: Node.js
+
+on: [push, pull_request]
+
+jobs:
+ build:
+ name: Node.js
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest, windows-latest, macos-latest]
+ node-version: [12.x, 14.x, 16.x, 18.x]
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Run Tests
+ run: npm run coverage
diff --git a/.github/workflows/sauce.yml b/.github/workflows/sauce.yml
deleted file mode 100644
index a13fc719a6..0000000000
--- a/.github/workflows/sauce.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: Sauce Labs
-on: push
-jobs:
- test:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v1
- - name: Set up node
- uses: actions/setup-node@v1
- with:
- node-version: 14
- - name: Install
- run: npm install
- env:
- # Download Sauce Connect binary now instead of on first run
- SAUCE_CONNECT_DOWNLOAD_ON_INSTALL: true
- - name: Add host
- run: echo "127.0.0.1 airtap.local" | sudo tee -a /etc/hosts
- - name: Test
- run: npm run test-browsers
- env:
- SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
- SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
diff --git a/.gitignore b/.gitignore
index 098013154a..dd386b93a3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,8 +1,5 @@
+coverage/
node_modules/
-.zuul.yml
-.nyc_output
-coverage
+node-*.tar.gz
package-lock.json
-*.tap
-.airtaprc
-yarn.lock
+tmp/
\ No newline at end of file
diff --git a/.npmignore b/.npmignore
deleted file mode 100644
index 3ba4f832ce..0000000000
--- a/.npmignore
+++ /dev/null
@@ -1,12 +0,0 @@
-build/
-test/
-examples/
-fs.js
-zlib.js
-.airtap.yml
-.airtaprc
-.babelrc
-.nyc_output
-.github
-coverage
-doc/
diff --git a/README.md b/README.md
index d03b064b9f..6731b46a47 100644
--- a/README.md
+++ b/README.md
@@ -1,24 +1,22 @@
# readable-stream
-***Node.js core streams for userland***
+**_Node.js core streams for userland_**
-[](https://www.npmjs.org/package/readable-stream)
+[](https://npm.im/readable-stream)
[](https://www.npmjs.org/package/readable-stream)
-
-
-
-[](https://saucelabs.com/u/readabe-stream)
+[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js)
+[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ABrowsers)
```bash
npm install --save readable-stream
```
-This package is a mirror of the streams implementations in Node.js.
+This package is a mirror of the streams implementations in Node.js 18.0.0.
-Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html).
+Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v18.0.0/docs/api/stream.html).
If you want to guarantee a stable streams base, regardless of what version of
-Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
As of version 2.0.0 **readable-stream** uses semantic versioning.
@@ -48,11 +46,8 @@ v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6,
https://github.com/nodejs/node/pull/17979
## Version 2.x.x
-v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
-### Big Thanks
-
-Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]
+v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
# Usage
@@ -61,15 +56,8 @@ without any changes, if you are just using one of the main classes and
functions.
```js
-const {
- Readable,
- Writable,
- Transform,
- Duplex,
- pipeline,
- finished
-} = require('readable-stream')
-````
+const { Readable, Writable, Transform, Duplex, pipeline, finished } = require('readable-stream')
+```
Note that `require('stream')` will return `Stream`, while
`require('readable-stream')` will return `Readable`. We discourage using
@@ -106,23 +94,22 @@ module.exports = {
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
-* Addressing stream issues on the Node.js issue tracker.
-* Authoring and editing stream documentation within the Node.js project.
-* Reviewing changes to stream subclasses within the Node.js project.
-* Redirecting changes to streams from the Node.js project to this
+- Addressing stream issues on the Node.js issue tracker.
+- Authoring and editing stream documentation within the Node.js project.
+- Reviewing changes to stream subclasses within the Node.js project.
+- Redirecting changes to streams from the Node.js project to this
project.
-* Assisting in the implementation of stream providers within Node.js.
-* Recommending versions of `readable-stream` to be included in Node.js.
-* Messaging about the future of streams to give the community advance
+- Assisting in the implementation of stream providers within Node.js.
+- Recommending versions of `readable-stream` to be included in Node.js.
+- Messaging about the future of streams to give the community advance
notice of changes.
+
## Team Members
-* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
-* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
+- **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+- **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
-* **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com>
-* **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl>
-
-[sauce]: https://saucelabs.com
+- **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com>
+- **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl>
diff --git a/babel.config.cjs b/babel.config.cjs
new file mode 100644
index 0000000000..d675db2f15
--- /dev/null
+++ b/babel.config.cjs
@@ -0,0 +1,3 @@
+module.exports = {
+ plugins: ['@babel/proposal-nullish-coalescing-operator', '@babel/proposal-optional-chaining']
+}
diff --git a/build/.gitignore b/build/.gitignore
deleted file mode 100644
index 3c3629e647..0000000000
--- a/build/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/build/build.js b/build/build.js
deleted file mode 100755
index 3d80f2b5b4..0000000000
--- a/build/build.js
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/env node
-
-const hyperquest = require('hyperquest')
- , bl = require('bl')
- , fs = require('fs')
- , path = require('path')
- , tar = require('tar-fs')
- , gunzip = require('gunzip-maybe')
- , babel = require('@babel/core')
- , glob = require('glob')
- , pump = require('pump')
- , rimraf = require('rimraf')
- , encoding = 'utf8'
- , urlRegex = /^https?:\/\//
- , nodeVersion = process.argv[2]
- , nodeVersionRegexString = '\\d+\\.\\d+\\.\\d+'
- , usageVersionRegex = RegExp('^' + nodeVersionRegexString + '$')
- , readmeVersionRegex =
- RegExp('((?:(?:Node-core )|(?:https\:\/\/nodejs\.org\/dist\/))v)' + nodeVersionRegexString, 'g')
-
- , readmePath = path.join(__dirname, '..', 'README.md')
- , files = require('./files')
- , testReplace = require('./test-replacements')
-
- , downloadurl = `https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}.tar.gz`
- , src = path.join(__dirname, `node-v${nodeVersion}`)
- , libsrcurl = path.join(src, 'lib/')
- , testsrcurl = path.join(src, 'test/parallel/')
- , libourroot = path.join(__dirname, '../lib/')
- , testourroot = path.join(__dirname, '../test/parallel/')
-
-
-if (!usageVersionRegex.test(nodeVersion)) {
- console.error('Usage: build.js xx.yy.zz')
- return process.exit(1);
-}
-
-// `inputLoc`: URL or local path.
-function processFile (inputLoc, out, replacements, addAtEnd) {
- var file = fs.createReadStream(inputLoc, encoding)
-
- file.pipe(bl(function (err, data) {
- if (err) throw err
-
- console.log('Processing', inputLoc)
- data = data.toString()
- replacements.forEach(function (replacement) {
- const regexp = replacement[0]
- var arg2 = replacement[1]
- if (typeof arg2 === 'function')
- arg2 = arg2.bind(data)
- if (arg2 === undefined) {
- console.error('missing second arg for file', inputLoc, replacement)
- throw new Error('missing second arg in replacement')
- }
- data = data.replace(regexp, arg2)
- })
-
- if (addAtEnd) {
- data += addAtEnd
- }
- if (inputLoc.slice(-3) === '.js') {
- try {
- const transformed = babel.transform(data, {
- // Required for babel to pick up .babelrc
- filename: inputLoc
- })
- data = transformed.code
- } catch (err) {
- fs.writeFile(out + '.errored.js', data, encoding, function () {
- console.log('Wrote errored', out)
-
- throw err
- })
- return
- }
- }
- fs.writeFile(out, data, encoding, function (err) {
- if (err) throw err
-
- console.log('Wrote', out)
- })
- }))
-}
-function deleteOldTests(){
- const files = fs.readdirSync(path.join(__dirname, '..', 'test', 'parallel'));
- for (let file of files) {
- let name = path.join(__dirname, '..', 'test', 'parallel', file);
- console.log('Removing', name);
- fs.unlinkSync(name);
- }
-}
-function processLibFile (file) {
- var replacements = files[file]
- , url = libsrcurl + file
- , out = path.join(libourroot, file)
-
- processFile(url, out, replacements)
-}
-
-
-function processTestFile (file) {
- var replacements = testReplace.all
- , url = testsrcurl + file
- , out = path.join(testourroot, file)
-
- if (testReplace[file])
- replacements = replacements.concat(testReplace[file])
-
- processFile(url, out, replacements, ';(function () { var t = require(\'tap\'); t.pass(\'sync run\'); })();var _list = process.listeners(\'uncaughtException\'); process.removeAllListeners(\'uncaughtException\'); _list.pop(); _list.forEach((e) => process.on(\'uncaughtException\', e));')
-}
-
-//--------------------------------------------------------------------
-// Download the release from nodejs.org
-console.log(`Downloading ${downloadurl}`)
-pump(
- hyperquest(downloadurl),
- gunzip(),
- tar.extract(__dirname),
- function (err) {
- if (err) {
- throw err
- }
-
- //--------------------------------------------------------------------
- // Grab & process files in ../lib/
-
- Object.keys(files).forEach(processLibFile)
-
-
- //--------------------------------------------------------------------
- // Discover, grab and process all test-stream* files on the given release
-
- glob(path.join(testsrcurl, 'test-@(stream|readable)*.js'), function (err, list) {
- if (err) {
- throw err
- }
-
- list.forEach(function (file) {
- file = path.basename(file)
- if (!/-wrap(?:-encoding)?\.js$/.test(file) &&
- file !== 'test-stream2-httpclient-response-end.js' &&
- file !== 'test-stream-base-no-abort.js' &&
- file !== 'test-stream-preprocess.js' &&
- file !== 'test-stream-inheritance.js' &&
- file !== 'test-stream-base-prototype-accessors.js' &&
- file !== 'test-stream-base-prototype-accessors-enumerability.js' &&
- file !== 'test-stream-wrap-drain.js' &&
- file !== 'test-stream-pipeline-http2.js' &&
- file !== 'test-stream-base-typechecking.js') {
- processTestFile(file)
- }
- })
- })
-
- //--------------------------------------------------------------------
- // Grab the nodejs/node test/common.js
-
- glob(path.join(src, 'test/common/*'), function (err, list) {
- if (err) {
- throw err
- }
-
- list.forEach(function (file) {
- file = path.basename(file)
- processFile(
- path.join(testsrcurl.replace(/parallel[/\\]$/, 'common/'), file)
- , path.join(testourroot.replace('parallel', 'common'), file)
- , testReplace['common.js']
- )
- })
- })
-
- //--------------------------------------------------------------------
- // Update Node version in README
- processFile(readmePath, readmePath, [
- [readmeVersionRegex, "$1" + nodeVersion]
- ])
- }
-)
-
-// delete the current contents of test/parallel so if node removes any tests
-// they are removed here
-deleteOldTests();
-
-process.once('beforeExit', function () {
- rimraf(src, function (err) {
- if (err) {
- throw err
- }
-
- console.log('Removed', src)
- })
-})
diff --git a/build/build.mjs b/build/build.mjs
new file mode 100644
index 0000000000..66d0a9e1c7
--- /dev/null
+++ b/build/build.mjs
@@ -0,0 +1,224 @@
+import { transform } from '@babel/core'
+import { createReadStream } from 'node:fs'
+import { mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises'
+import { dirname, resolve } from 'node:path'
+import process from 'node:process'
+import { finished } from 'node:stream/promises'
+import { fileURLToPath } from 'node:url'
+import prettier from 'prettier'
+import { Parse } from 'tar'
+import { request } from 'undici'
+import prettierConfig from '../prettier.config.cjs'
+import { aliases, skippedSources, sources } from './files.mjs'
+import { footers } from './footers.mjs'
+import { headers } from './headers.mjs'
+import { replacements } from './replacements.mjs'
+
+const baseMatcher = /^(?:lib|test)/
+
+function highlightFile(file, color) {
+ return `\x1b[${color}m${file.replace(process.cwd() + '/', '')}\x1b[0m`
+}
+
+function info(message) {
+ console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+async function extract(nodeVersion, tarFile) {
+ const sourcesMatcher = sources.map((s) => new RegExp(s))
+
+ info(`Extracting Node.js ${nodeVersion} tar file ...`)
+ const contents = []
+ const tarPrefix = `node-v${nodeVersion}/`
+ const parser = new Parse()
+
+ parser.on('entry', (entry) => {
+ const dst = entry.path.replace(tarPrefix, '')
+
+ if (
+ entry.type === 'Directory' ||
+ skippedSources.includes(dst) ||
+ !baseMatcher.test(dst) ||
+ !sourcesMatcher.some((s) => s.test(dst))
+ ) {
+ return entry.resume()
+ }
+
+ let buffer = Buffer.alloc(0)
+
+ entry.on('data', (chunk) => {
+ buffer = Buffer.concat([buffer, chunk])
+ })
+
+ entry.on('end', () => {
+ const content = buffer.toString('utf-8')
+
+ // Enqueue file
+ contents.push([dst, content])
+
+ // Some special cases when file aliasing is needed
+ if (aliases[dst]) {
+ for (const alias of aliases[dst]) {
+ contents.push([alias, content])
+ }
+ }
+ })
+
+ entry.resume()
+ })
+
+ await finished(tarFile.pipe(parser))
+ return contents
+}
+
+async function processFiles(contents) {
+ const replacementsKeys = Object.keys(replacements)
+ const headersKeys = Object.keys(headers)
+ const footersKeys = Object.keys(footers)
+
+ prettierConfig.parser = 'babel'
+
+ for (let [path, content] of contents) {
+ const modifications = []
+ const matchingReplacements = replacementsKeys.filter((k) => new RegExp(k).test(path))
+ const matchingHeaders = headersKeys.filter((k) => new RegExp(k).test(path))
+ const matchingFooters = footersKeys.filter((k) => new RegExp(k).test(path))
+
+ // Perform replacements
+ if (matchingReplacements.length) {
+ modifications.push(highlightFile('replacements', 33))
+
+ for (const matching of matchingReplacements) {
+ for (const [from, to] of replacements[matching]) {
+ content = content.replaceAll(new RegExp(from, 'gm'), to)
+ }
+ }
+ }
+
+ // Prepend headers
+ if (matchingHeaders.length) {
+ modifications.push(highlightFile('headers', 33))
+
+ for (const headerKey of matchingHeaders) {
+ for (const header of headers[headerKey]) {
+ content = header + content
+ }
+ }
+ }
+
+ // Append footers
+ if (matchingFooters.length) {
+ modifications.push(highlightFile('footers', 33))
+
+ for (const footerKey of matchingFooters) {
+ for (const footer of footers[footerKey]) {
+ content += footer
+ }
+ }
+ }
+
+ // Process the file through babel and prettier
+ if (path.endsWith('.js')) {
+ modifications.push(highlightFile('babel', 33), highlightFile('prettier', 33))
+ content = prettier.format(await transform(content).code.replaceAll('void 0', 'undefined'), prettierConfig)
+ }
+
+ if (!modifications.length) {
+ modifications.push('no modifications')
+ }
+
+ // Write the file
+ info(`Creating file ${highlightFile(path, 32)} (${modifications.join(', ')}) ...`)
+ await writeFile(path, content, 'utf-8')
+ }
+}
+
+async function downloadNode(nodeVersion) {
+ // Download node
+ const downloadUrl = `https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}.tar.gz`
+ info(`Downloading ${downloadUrl} ...`)
+ const { statusCode, body } = await request(downloadUrl, { pipelining: 0 })
+
+ if (statusCode !== 200) {
+ info(`Downloading failed with HTTP code ${statusCode}.`)
+ process.exit(1)
+ }
+
+ return body
+}
+
+async function main() {
+ const __dirname = fileURLToPath(new URL('.', import.meta.url))
+ const rootDir = resolve(__dirname, '..')
+
+ if (process.cwd() !== rootDir) {
+ console.error('Please run this from the root directory of readable-stream repository.')
+ return process.exit(1)
+ }
+
+ const nodeVersion = process.argv[2]
+
+ if (!nodeVersion?.match(/^\d+\.\d+\.\d+/)) {
+ console.error('Usage: build.js xx.yy.zz [node.tar.gz]')
+ return process.exit(1)
+ }
+
+ // Cleanup existing folder
+ await rm('lib', { recursive: true, force: true })
+ await rm('test', { recursive: true, force: true })
+
+ // Download or open the tar file
+ let tarFile
+
+ if (process.argv[3]) {
+ tarFile = createReadStream(process.argv[3])
+ } else {
+ tarFile = await downloadNode(nodeVersion)
+ }
+
+ // Extract contents
+ const contents = await extract(nodeVersion, tarFile)
+
+ // Update Node version in README.md
+ replacements['README.md'][0][1] = replacements['README.md'][0][1].replace('$2', nodeVersion)
+ replacements['README.md'][1][1] = replacements['README.md'][1][1].replace('$2', nodeVersion)
+
+ // Add custom files
+ contents.push(['lib/ours/browser.js', await readFile('src/browser.js', 'utf-8')])
+ contents.push(['lib/ours/index.js', await readFile('src/index.js', 'utf-8')])
+ contents.push(['lib/ours/errors.js', await readFile('src/errors.js', 'utf-8')])
+ contents.push(['lib/ours/primordials.js', await readFile('src/primordials.js', 'utf-8')])
+ contents.push(['lib/ours/util.js', await readFile('src/util.js', 'utf-8')])
+
+ for (const file of await readdir('src/test/ours')) {
+ contents.push([`test/ours/${file}`, await readFile(`src/test/ours/${file}`, 'utf-8')])
+ }
+
+ for (const file of await readdir('src/test/browser')) {
+ if (file.endsWith('fixtures')) {
+ continue
+ }
+
+ contents.push([`test/browser/${file}`, await readFile(`src/test/browser/${file}`, 'utf-8')])
+ }
+
+ for (const file of await readdir('src/test/browser/fixtures')) {
+ contents.push([`test/browser/fixtures/${file}`, await readFile(`src/test/browser/fixtures/${file}`, 'utf-8')])
+ }
+
+ contents.push(['README.md', await readFile('./README.md', 'utf-8')])
+
+ // Create paths
+ const paths = new Set(contents.map((c) => dirname(c[0])))
+ paths.delete('.')
+
+ for (const path of paths.values()) {
+ info(`Creating directory ${highlightFile(path, 32)} ...`)
+ await mkdir(path, { recursive: true })
+ }
+
+ // Perform replacements
+ await processFiles(contents)
+}
+
+await main()
diff --git a/build/common-replacements.js b/build/common-replacements.js
deleted file mode 100644
index e17f5d7910..0000000000
--- a/build/common-replacements.js
+++ /dev/null
@@ -1,59 +0,0 @@
-module.exports.altForEachImplReplacement = [
- /$/
- , '\nfunction forEach (xs, f) {\n'
- + ' for (var i = 0, l = xs.length; i < l; i++) {\n'
- + ' f(xs[i], i);\n'
- + ' }\n'
- + '}\n'
-]
-
-module.exports.altForEachUseReplacement = [
- /(\W)([\w\.\(\),\[\] ']+)(\.forEach\()/gm
- , '$1forEach($2, '
-]
-
-module.exports.specialForEachReplacment = [
- /(\W)(\[(?:\d\,\s)+\d\])(\.forEach\()/gm
- , '$1forEach($2, '
-]
-
-module.exports.altIndexOfImplReplacement = [
- /$/
- , '\nfunction indexOf (xs, x) {\n'
- + ' for (var i = 0, l = xs.length; i < l; i++) {\n'
- + ' if (xs[i] === x) return i;\n'
- + ' }\n'
- + ' return -1;\n'
- + '}\n'
-]
-
-module.exports.altIndexOfUseReplacement = [
- /(\W)([\w\.\(\),\[\]]+)(\.indexOf\()/gm
- , '$1indexOf($2, '
-]
-module.exports.objectKeysDefine = [
- /^('use strict';)$/m
- , '$1\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n'
- + ' var keys = [];\n'
- + ' for (var key in obj) keys.push(key);\n'
- + ' return keys;\n'
- + '}\n/**/\n'
-]
-
-module.exports.objectKeysReplacement = [
- /Object\.keys/g
- , 'objectKeys'
- ]
-
-
-module.exports.bufferShimFix = [
- /^('use strict';)$/m,
- `/**/
- const bufferShim = require('safe-buffer').Buffer;
- /**/`
-]
-
-module.exports.bufferStaticMethods = [
- /Buffer\.((?:alloc)|(?:allocUnsafe)|(?:from))/g,
- `bufferShim.$1`
-]
diff --git a/build/files.js b/build/files.js
deleted file mode 100644
index 96d5210947..0000000000
--- a/build/files.js
+++ /dev/null
@@ -1,381 +0,0 @@
-/* This file lists the files to be fetched from the node repo
- * in the /lib/ directory which will be placed in the ../lib/
- * directory after having each of the "replacements" in the
- * array for that file applied to it. The replacements are
- * simply the arguments to String#replace, so they can be
- * strings, regexes, functions.
- */
-
-const headRegexp = /(^module.exports = \w+;?)/m
-
- , requireReplacement = [
- /(require\(['"])(_stream_)/g
- , '$1./$2'
- ]
- , instanceofReplacement = [
- /instanceof Stream\.(\w+)/g
- , function (match, streamType) {
- return 'instanceof ' + streamType
- }
- ]
-
- // use the string_decoder in node_modules rather than core
- , stringDecoderReplacement = [
- /(require\(['"])(string_decoder)(['"]\))/g
- , '$1$2/$3'
- ]
-
- // The browser build ends up with a circular dependency, so the require is
- // done lazily, but cached.
- , addDuplexDec = [
- headRegexp
- , '$1\n\n/**/\nvar Duplex;\n/**/\n'
- ]
- , addDuplexRequire = [
- /^(function (?:Writable|Readable)(?:State)?.*{)/gm
- , '\n$1\n Duplex = Duplex || require(\'./_stream_duplex\');\n'
- ]
-
- , altIndexOfImplReplacement = require('./common-replacements').altIndexOfImplReplacement
- , altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement
-
- , utilReplacement = [
- /^const util = require\('util'\);/m
- , ''
- ]
-
- , inherits = [
- /^util.inherits/m
- , 'require(\'inherits\')'
- ]
-
- , debugLogReplacement = [
- /const debug = util.debuglog\('stream'\);/
- , '\n\n/**/\nconst debugUtil = require(\'util\');\n'
- + 'let debug;\n'
- + 'if (debugUtil && debugUtil.debuglog) {\n'
- + ' debug = debugUtil.debuglog(\'stream\');\n'
- + '} else {\n'
- + ' debug = function () {};\n'
- + '}\n/**/\n'
- ]
-
- , deprecateReplacement = [
- /util.deprecate/
- , 'require(\'util-deprecate\')'
- ]
-
- , objectDefinePropertyReplacement = [
- /(Object\.defineProperties)/
- , 'if (Object.defineProperties) $1'
- ]
- , objectDefinePropertySingReplacement = [
- /Object\.defineProperty\(([\w\W]+?)\}\);/
- , '(function (){try {\n'
- + 'Object.defineProperty\($1});\n'
- + '}catch(_){}}());\n'
- ]
-
- , objectKeysDefine = require('./common-replacements').objectKeysDefine
-
- , objectKeysReplacement = require('./common-replacements').objectKeysReplacement
-
- , eventEmittterReplacement = [
- /^(const EE = require\('events'\));$/m
- , '/**/\n$1.EventEmitter;\n\n'
- + 'var EElistenerCount = function(emitter, type) {\n'
- + ' return emitter.listeners(type).length;\n'
- + '};\n/**/\n'
- ]
-
- , eventEmittterListenerCountReplacement = [
- /(EE\.listenerCount)/g
- , 'EElistenerCount'
- ]
-
- , bufferIsEncodingReplacement = [
- /Buffer.isEncoding\((\w+)\)/
- , '([\'hex\', \'utf8\', \'utf-8\', \'ascii\', \'binary\', \'base64\',\n'
- + '\'ucs2\', \'ucs-2\',\'utf16le\', \'utf-16le\', \'raw\']\n'
- + '.indexOf(($1 + \'\').toLowerCase()) > -1)'
- ]
-
- , requireStreamReplacement = [
- /const Stream = require\('stream'\);/
- , '\n\n/**/\n'
- + 'var Stream = require(\'./internal/streams/stream\')'
- + '\n/**/\n'
- ]
-
- , isBufferReplacement = [
- /(\w+) instanceof Buffer/g
- , 'Buffer.isBuffer($1)'
- ]
-
- , internalUtilReplacement = [
- /^const internalUtil = require\('internal\/util'\);/m
- , '\n/**/\nconst internalUtil = {\n deprecate: require(\'util-deprecate\')\n};\n'
- + '/**/\n'
- ]
- , internalDirectory = [
- /require\('internal\/streams\/([a-zA-z]+)'\)/g,
- 'require(\'./internal/streams/$1\')'
- ]
- , fixInstanceCheck = [
- /if \(typeof Symbol === 'function' && Symbol\.hasInstance\) \{/,
- `if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {`
- ]
- , removeOnWriteBind = [
- /onwrite\.bind\([^)]+?\)/
- , `function(er) { onwrite(stream, er); }`
- ]
- , addUintStuff = [
- /(?:var|const) (?:{ )Buffer(?: }) = require\('buffer'\)(?:\.Buffer)?;/g
- , `
- const Buffer = require('buffer').Buffer
- const OurUint8Array = global.Uint8Array || function () {}
-function _uint8ArrayToBuffer(chunk) {
- return Buffer.from(chunk);
-}
-function _isUint8Array(obj) {
- return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
- `
- ]
- , addConstructors = [
- headRegexp
- , `$1
-
-/* */
-function WriteReq(chunk, encoding, cb) {
- this.chunk = chunk;
- this.encoding = encoding;
- this.callback = cb;
- this.next = null;
-}
-
-// It seems a linked list but it is not
-// there will be only 2 of these for each stream
-function CorkedRequest(state) {
- this.next = null;
- this.entry = null;
- this.finish = () => { onCorkedFinish(this, state) };
-}
-/* */
-`
- ]
- , useWriteReq = [
- /state\.lastBufferedRequest = \{.+?\}/g
- , `state.lastBufferedRequest = new WriteReq(chunk, encoding, cb)`
- ]
- , useCorkedRequest = [
- /var corkReq = [\s\S]+?(.+?)\.corkedRequestsFree = corkReq/g
- , `$1.corkedRequestsFree = new CorkedRequest($1)`
- ]
- , fixUintStuff = [
- /Stream\.(_isUint8Array|_uint8ArrayToBuffer)\(/g
- , `$1(`
- ]
- , fixBufferCheck = [
- /Object\.getPrototypeOf\((chunk)\) !== Buffer\.prototype/g
- , '!Buffer.isBuffer($1)'
- ]
- , errorsOneLevel = [
- /internal\/errors/
- , '../errors'
- ]
- , errorsTwoLevel = [
- /internal\/errors/
- , '../../../errors'
- ]
- , warnings = [
- /^const { emitExperimentalWarning } = require\('internal\/util'\);/m,
- 'const { emitExperimentalWarning } = require(\'../experimentalWarning\');'
- ]
- , numberIE11 = [
- /Number\.isNaN\(n\)/g
- , 'n !== n'
- ]
- , integerIE11 = [
- /Number\.isInteger\(hwm\)/g
- , '(isFinite(hwm) && Math.floor(hwm) === hwm)'
- ]
- , noAsyncIterators1 = [
- /Readable\.prototype\[Symbol\.asyncIterator\] = function\(\) \{/g
- , 'if (typeof Symbol === \'function\' ) {\nReadable.prototype[Symbol.asyncIterator] = function () {'
- ]
- , noAsyncIterators2 = [
- /return createReadableStreamAsyncIterator\(this\);\n};/m
- , 'return createReadableStreamAsyncIterator(this);\n};\n}'
- ]
- , noAsyncIteratorsFrom1 = [
- /Readable\.from = function *\(iterable, opts\) \{/g
- , 'if (typeof Symbol === \'function\' ) {\nReadable.from = function (iterable, opts) {'
- ]
- , noAsyncIteratorsFrom2 = [
- /return from\(Readable, iterable, opts\);\n};/m
- , 'return from(Readable, iterable, opts);\n};\n}'
- ]
- , once = [
- /const \{ once \} = require\('internal\/util'\);/
- , 'function once(callback) { let called = false; return function(...args) { if (called) return; called = true; callback(...args); }; }'
- ]
-
-module.exports['_stream_duplex.js'] = [
- requireReplacement
- , instanceofReplacement
- , utilReplacement
- , inherits
- , stringDecoderReplacement
- , objectKeysReplacement
- , objectKeysDefine
- , errorsOneLevel
-]
-
-module.exports['_stream_passthrough.js'] = [
- requireReplacement
- , instanceofReplacement
- , utilReplacement
- , inherits
- , stringDecoderReplacement
- , errorsOneLevel
-]
-
-module.exports['_stream_readable.js'] = [
- addDuplexRequire
- , addDuplexDec
- , requireReplacement
- , instanceofReplacement
- , altIndexOfImplReplacement
- , altIndexOfUseReplacement
- , stringDecoderReplacement
- , debugLogReplacement
- , utilReplacement
- , inherits
- , stringDecoderReplacement
- , eventEmittterReplacement
- , requireStreamReplacement
- , isBufferReplacement
- , eventEmittterListenerCountReplacement
- , internalDirectory
- , fixUintStuff
- , addUintStuff
- , errorsOneLevel
- , warnings
- , numberIE11
- , noAsyncIterators1
- , noAsyncIterators2
- , noAsyncIteratorsFrom1
- , noAsyncIteratorsFrom2
-]
-
-module.exports['_stream_transform.js'] = [
- requireReplacement
- , instanceofReplacement
- , utilReplacement
- , inherits
- , stringDecoderReplacement
- , errorsOneLevel
-]
-
-module.exports['_stream_writable.js'] = [
- addDuplexRequire
- , addDuplexDec
- , requireReplacement
- , instanceofReplacement
- , utilReplacement
- , inherits
- , stringDecoderReplacement
- , debugLogReplacement
- , deprecateReplacement
- , objectDefinePropertyReplacement
- , objectDefinePropertySingReplacement
- , bufferIsEncodingReplacement
- , [ /^var assert = require\('assert'\);$/m, '' ]
- , requireStreamReplacement
- , isBufferReplacement
- , internalUtilReplacement
- , fixInstanceCheck
- , removeOnWriteBind
- , internalDirectory
- , fixUintStuff
- , addUintStuff
- , fixBufferCheck
- , useWriteReq
- , useCorkedRequest
- , addConstructors
- , errorsOneLevel
-]
-
-module.exports['internal/streams/buffer_list.js'] = [
- [
- /inspect.custom/g,
- 'custom'
- ],
- [
- /const \{ inspect \} = require\('util'\);/,
- `
-const { inspect } = require('util')
-const custom = inspect && inspect.custom || 'inspect'
- `
- ]
-]
-module.exports['internal/streams/destroy.js'] = [
- errorsTwoLevel
-]
-
-module.exports['internal/streams/state.js'] = [
- , errorsTwoLevel
- , integerIE11
-]
-
-module.exports['internal/streams/async_iterator.js'] = [
- , errorsTwoLevel
- , [
- /internal\/streams\/end-of-stream/,
- './end-of-stream'
- ]
- , [
- /const AsyncIteratorPrototype = Object\.getPrototypeOf\(\n.*Object\.getPrototypeOf\(async function\* \(\) \{\}\).prototype\);/m,
- 'const AsyncIteratorPrototype = Object\.getPrototypeOf(function () {})'
- ]
- , [
- / return\(\)/,
- '[Symbol.asyncIterator]() { return this },\n return\(\)'
- ]
-]
-
-module.exports['internal/streams/end-of-stream.js'] = [
- , errorsTwoLevel
- , [
- /const \{ once \} = require\('internal\/util'\);/,
- `function once(callback) {
- let called = false;
- return function(...args) {
- if (called) return;
- called = true;
- callback.apply(this, args);
- };
-}`
- ]
-]
-
-module.exports['internal/streams/pipeline.js'] = [
- once
- , errorsTwoLevel
- , [
- /require\('internal\/streams\/end-of-stream'\)/,
- 'require(\'.\/end-of-stream\')'
- ]
-]
-
-module.exports['internal/streams/from.js'] = [
- errorsTwoLevel
- , [
- /if \(iterable && iterable\[Symbol.asyncIterator\]\)/
- , `if (iterable && typeof iterable.next === 'function') {
- iterator = iterable
- }
-else if (iterable && iterable[Symbol.asyncIterator])`
- ]
-]
diff --git a/build/files.mjs b/build/files.mjs
new file mode 100644
index 0000000000..84c17d975c
--- /dev/null
+++ b/build/files.mjs
@@ -0,0 +1,29 @@
+export const sources = [
+ 'lib/_stream_.+',
+ 'lib/internal/streams/.+',
+ 'lib/internal/validators.js',
+ 'lib/stream.js',
+ 'lib/stream/promises.js',
+ 'test/common/fixtures.js',
+ 'test/common/fixtures.mjs',
+ 'test/common/index.js',
+ 'test/common/index.mjs',
+ 'test/common/tmpdir.js',
+ 'test/fixtures/[^/]+.txt',
+ 'test/parallel/test-readable.+',
+ 'test/parallel/test-stream.+'
+]
+
+export const skippedSources = [
+ 'lib/_stream_wrap.js',
+ 'test/parallel/test-stream-consumers.js',
+ 'test/parallel/test-stream-destroy.js',
+ 'test/parallel/test-stream-map.js',
+ 'test/parallel/test-stream-pipeline.js',
+ 'test/parallel/test-stream-readable-async-iterators.js',
+ 'test/parallel/test-stream-wrap-drain.js',
+ 'test/parallel/test-stream-wrap-encoding.js',
+ 'test/parallel/test-stream-wrap.js'
+]
+
+export const aliases = {}
diff --git a/build/footers.mjs b/build/footers.mjs
new file mode 100644
index 0000000000..d9316811e6
--- /dev/null
+++ b/build/footers.mjs
@@ -0,0 +1,24 @@
+const testTicksDisableHook = `
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ hook.disable();
+ });
+ /* replacement end */
+`
+
+const testParallel = `
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ if(code === 0) {
+ tap.pass('test succeeded');
+ } else {
+ tap.fail(\`test failed - exited code \${code}\`);
+ }
+ });
+ /* replacement end */
+`
+
+export const footers = {
+ 'test/parallel/test-stream-writable-samecb-singletick.js': testTicksDisableHook,
+ 'test/parallel/.+': testParallel
+}
diff --git a/build/headers.mjs b/build/headers.mjs
new file mode 100644
index 0000000000..12ef62db47
--- /dev/null
+++ b/build/headers.mjs
@@ -0,0 +1,21 @@
+const testPolyfills = `
+ /* replacement start */
+ const AbortController = globalThis.AbortController || require('abort-controller').AbortController;
+ const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal;
+ const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget;
+
+ if(typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function() {
+ const controller = new AbortController();
+ controller.abort();
+
+ return controller.signal;
+ }
+ }
+ /* replacement end */
+`
+
+export const headers = {
+ 'test/parallel/test-stream-(add-abort-signal|drop-take|duplex-destroy|flatMap|forEach|filter|finished|readable-destroy|reduce|toArray|writable-destroy).js':
+ [testPolyfills]
+}
diff --git a/build/replacements.mjs b/build/replacements.mjs
new file mode 100644
index 0000000000..0933cd1318
--- /dev/null
+++ b/build/replacements.mjs
@@ -0,0 +1,299 @@
+const legacyStreamsRequireStream = ["require\\('stream'\\)", "require('./stream')"]
+
+const internalStreamsBufferPolyfill = [
+ "'use strict'",
+ `
+ 'use strict'
+
+ const bufferModule = require('buffer');
+ `
+]
+
+const internalStreamsAbortControllerPolyfill = [
+ "'use strict'",
+ `
+ 'use strict'
+
+ const abortControllerModule = require('abort-controller');
+ `
+]
+
+const internalStreamsNoRequireBlob = [
+ "const \\{\\n isBlob,\\n\\} = require\\('internal/blob'\\);",
+ `
+ const Blob = globalThis.Blob || bufferModule.Blob;
+ const isBlob = typeof Blob !== 'undefined' ? function isBlob (b) { return b instanceof Blob } : function isBlob(b) { return false; }
+ `
+]
+
+const internalStreamsInspectCustom = ['inspect.custom', "Symbol.for('nodejs.util.inspect.custom')"]
+
+const internalStreamsNoRequireAbortController = [
+ 'const \\{ AbortController \\} = .+',
+ 'const AbortController = globalThis.AbortController || abortControllerModule.AbortController;'
+]
+
+const internalStreamsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"]
+
+const internalStreamsNoRequireBuffer = ["const \\{ Buffer \\} = require\\('buffer'\\);", '']
+
+const internalStreamsRequireErrors = ["require\\('internal/errors'\\)", "require('../../ours/errors')"]
+
+const internalStreamsRequireEventEmitter = ['const EE =', 'const { EventEmitter: EE } =']
+
+const internalStreamsRequirePrimordials = ['= primordials', "= require('../../ours/primordials')"]
+
+const internalStreamsRequireRelativeUtil = [
+ 'const \\{ (once|createDeferredPromise|) \\} = .+;',
+ "const { $1 } = require('../../ours/util');"
+]
+
+const internalStreamsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./duplex')"]
+
+const internalStreamsRequireStream = ["require\\('stream'\\)", "require('../../stream')"]
+
+const internalStreamsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('./$1')"]
+
+const internalStreamsRequireUtil = [
+ "require\\('internal/util(?:/(?:debuglog|inspect))?'\\)",
+ "require('../../ours/util')"
+]
+
+const internalStreamsRequireUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('../../ours/util')"]
+
+const internalStreamsRequireWebStream = ["require\\('internal/webstreams/adapters'\\)", '{}']
+
+const internalStreamsWeakHandler = [
+ "const \\{ kWeakHandler \\} = require\\('../event_target'\\);",
+ "const kWeakHandler = require('../../ours/primordials').Symbol('kWeak');"
+]
+
+const internalValidatorsNoCoalesceAssignment = [
+ '\\s*(.+) \\?\\?= (.+)',
+ `
+ if (typeof $1 === 'undefined') {
+ $1 = $2
+ }
+ `
+]
+
+const internalValidatorsNoRequireSignals = [
+ "const \\{ signals \\} = internalBinding\\('constants'\\).os;",
+ 'const signals = {};'
+]
+
+const internalValidatorsRequireAssert = ["require\\('internal/assert'\\)", "require('assert')"]
+
+const internalValidatorsRequireAsyncHooks = ["require\\('./async_hooks'\\)", "require('internal/async_hooks')"]
+
+const internalValidatorsRequireErrors = ["require\\('internal/errors'\\)", "require('../ours/errors')"]
+
+const internalValidatorsRequirePrimordials = ['= primordials', "= require('../ours/primordials')"]
+
+const internalValidatorsRequireRelativeUtil = ["require\\('internal/util'\\)", "require('../ours/util')"]
+
+const internalValidatorsRequireUtilTypes = ["require\\('internal/util/types'\\)", "require('../ours/util').types"]
+
+const streamIndexIsUint8Array = [
+ "Stream._isUint8Array = require\\('internal/util/types'\\).isUint8Array;",
+ `
+ Stream._isUint8Array = function isUint8Array(value) {
+ return value instanceof Uint8Array
+ };
+ `
+]
+
+const streamIndexRequireInternal = ["require\\('internal/([^']+)'\\)", "require('./internal/$1')"]
+
+const streamIndexRequireInternalBuffer = ["require\\('internal/buffer'\\)", '{}']
+
+const streamIndexRequireErrors = ["require\\('internal/errors'\\);", "require('./ours/errors');"]
+
+const streamIndexRequirePrimordials = ['= primordials', "= require('./ours/primordials')"]
+
+const streamIndexRequirePromises = ["require\\('stream/promises'\\);", "require('./stream/promises');"]
+
+const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('./ours/util')"]
+
+const streamIndexUint8ArrayToBuffer = ['new internalBuffer.FastBuffer', 'Buffer.from']
+
+const streamsRequireErrors = ["require\\('internal/errors'\\)", "require('../ours/errors')"]
+
+const streamsRequireInternal = ["require\\('internal/(.+)'\\)", "require('../internal/$1')"]
+
+const streamsRequirePrimordials = ['= primordials', "= require('../ours/primordials')"]
+
+const testCommonKnownGlobals = [
+ 'let knownGlobals = \\[(\\n\\s+)',
+ `
+ let knownGlobals = [\n
+ typeof AggregateError !== 'undefined' ? AggregateError : require('../../lib/ours/util').AggregateError,
+ typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController,
+ typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal,
+ typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget,
+ `
+]
+
+const testParallelBindings = [
+ "const \\{ internalBinding \\} = require\\('../../lib/internal/test/binding'\\);",
+ 'const internalBinding = process.binding'
+]
+
+const testParallelHasOwn = ['Object.hasOwn\\(', 'Reflect.has(']
+
+const testParallelIncludeTap = [
+ "('use strict')",
+ `
+ $1
+
+ const tap = require('tap');
+ const silentConsole = { log() {}, error() {} };
+ `
+]
+
+const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/ours/index.js';"]
+
+const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"]
+
+const testParallelDuplexFromBlob = [
+ "const \\{ Blob \\} = require\\('buffer'\\);",
+ "const Blob = globalThis.Blob || require('buffer').Blob"
+]
+
+const testParallelDuplexSkipWithoutBlob = [
+ "(\\{\n const blob = new Blob\\(\\['blob'\\]\\))",
+ "if (typeof Blob !== 'undefined') $1"
+]
+
+const testParallelFinishedEvent = ["res.on\\('close", "res.on('finish"]
+
+const testParallelFlatMapWinLineSeparator = [
+ "'xyz\\\\n'\\.repeat\\(5\\)",
+ "(process.platform === 'win32' ? 'xyz\\r\\n' : 'xyz\\n').repeat(5)"
+]
+
+const testParallelPreprocessWinLineSeparator = [
+ 'assert.strictEqual\\(streamedData, modelData\\);',
+ "assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\\r\\n/g, '\\n') : modelData);"
+]
+
+const testParallelReadableBufferListInspect = [
+ 'assert.strictEqual\\(\\n\\s+util.inspect\\(\\[ list \\], \\{ compact: false \\}\\),\\n\\s+`\\[\\n\\s+BufferList \\{\\n\\s+head: \\[Object\\],\\n\\s+tail: \\[Object\\],\\n\\s+length: 4\\n\\s+\\}\\n\\]`\\);',
+ `
+ assert.strictEqual(typeof list.head, 'object');
+ assert.strictEqual(typeof list.tail, 'object');
+ assert.strictEqual(list.length, 4);
+ `
+]
+
+const testParallelRequireStream = ["require\\('stream'\\)", "require('../../lib/ours/index')"]
+
+const testParallelRequireStreamConsumer = ["require\\('stream/consumer'\\)", "require('../../lib/stream/consumer')"]
+
+const testParallelRequireStreamInternals = ["require\\('(internal/.+)'\\)", "require('../../lib/$1')"]
+
+const testParallelRequireStreamInternalsLegacy = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"]
+
+const testParallelRequireStreamPromises = ["require\\('stream/promises'\\)", "require('../../lib/stream/promises')"]
+
+const testParallelRequireStreamWeb = ["require\\('stream/web'\\)", "require('../../lib/stream/web')"]
+
+const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1']
+
+const testParallelTimersPromises = [
+ "const { setTimeout } = require\\('timers/promises'\\);",
+ `
+ const st = require('timers').setTimeout;
+
+ function setTimeout(ms) {
+ return new Promise(resolve => {
+ st(resolve, ms);
+ });
+ }
+ `
+]
+
+const testParallelTicksReenableConsoleLog = ['silentConsole.log\\(i\\);', 'console.log(i);']
+
+const testParallelTickSaveHook = ['async_hooks.createHook\\(\\{', 'const hook = async_hooks.createHook({']
+
+const readmeInfo = ['(This package is a mirror of the streams implementations in Node.js) (\\d+.\\d+.\\d+).', '$1 $2.']
+
+const readmeLink = ['(\\[Node.js website\\]\\(https://nodejs.org/dist/v)(\\d+.\\d+.\\d+)', '$1$2']
+
+export const replacements = {
+ 'lib/_stream.+': [legacyStreamsRequireStream],
+ 'lib/internal/streams/duplexify.+': [
+ internalStreamsBufferPolyfill,
+ internalStreamsAbortControllerPolyfill,
+ internalStreamsNoRequireBlob,
+ internalStreamsNoRequireAbortController
+ ],
+ 'lib/internal/streams/(operators|pipeline).+': [
+ internalStreamsAbortControllerPolyfill,
+ internalStreamsNoRequireAbortController
+ ],
+ 'lib/internal/streams/.+': [
+ internalStreamsNoRequireBuffer,
+ internalStreamsRequireErrors,
+ internalStreamsRequireEventEmitter,
+ internalStreamsRequirePrimordials,
+ internalStreamsRequireRelativeDuplex,
+ internalStreamsRequireRelativeUtil,
+ internalStreamsRequireStream,
+ internalStreamsRequireStreams,
+ internalStreamsRequireUtil,
+ internalStreamsRequireUtilDebuglog,
+ internalStreamsRequireWebStream,
+ internalStreamsRequireInternal,
+ internalStreamsWeakHandler,
+ internalStreamsInspectCustom
+ ],
+ 'lib/internal/validators.js': [
+ internalValidatorsRequireAssert,
+ internalValidatorsRequireAsyncHooks,
+ internalValidatorsRequireErrors,
+ internalValidatorsRequirePrimordials,
+ internalValidatorsRequireRelativeUtil,
+ internalValidatorsRequireUtilTypes,
+ internalValidatorsNoRequireSignals,
+ internalValidatorsNoCoalesceAssignment
+ ],
+ 'lib/stream.js': [
+ streamIndexIsUint8Array,
+ streamIndexUint8ArrayToBuffer,
+ streamIndexRequireInternalBuffer,
+ streamIndexRequireErrors,
+ streamIndexRequirePrimordials,
+ streamIndexRequirePromises,
+ streamIndexRequireUtil,
+ streamIndexRequireInternal
+ ],
+ 'lib/stream/.+': [streamsRequireErrors, streamsRequirePrimordials, streamsRequireInternal],
+ 'test/common/index.js': [testCommonKnownGlobals],
+ 'test/parallel/.+': [
+ testParallelIncludeTap,
+ testParallelRequireStream,
+ testParallelRequireStreamConsumer,
+ testParallelRequireStreamInternals,
+ testParallelRequireStreamInternalsLegacy,
+ testParallelRequireStreamPromises,
+ testParallelRequireStreamWeb,
+ testParallelImportStreamInMjs,
+ testParallelImportTapInMjs,
+ testParallelBindings,
+ testParallelHasOwn,
+ testParallelSilentConsole,
+ testParallelTimersPromises
+ ],
+ 'test/parallel/test-stream-duplex-from.js': [testParallelDuplexFromBlob, testParallelDuplexSkipWithoutBlob],
+ 'test/parallel/test-stream-finished.js': [testParallelFinishedEvent],
+ 'test/parallel/test-stream-flatMap.js': [testParallelFlatMapWinLineSeparator],
+ 'test/parallel/test-stream-preprocess.js': [testParallelPreprocessWinLineSeparator],
+ 'test/parallel/test-stream-writable-samecb-singletick.js': [
+ testParallelTicksReenableConsoleLog,
+ testParallelTickSaveHook
+ ],
+ 'test/parallel/test-stream2-readable-from-list.js': [testParallelReadableBufferListInspect],
+ 'README.md': [readmeInfo, readmeLink]
+}
diff --git a/build/test-replacements.js b/build/test-replacements.js
deleted file mode 100644
index eebdec5d67..0000000000
--- a/build/test-replacements.js
+++ /dev/null
@@ -1,473 +0,0 @@
-const altForEachImplReplacement = require('./common-replacements').altForEachImplReplacement
- , altForEachUseReplacement = require('./common-replacements').altForEachUseReplacement
- , altIndexOfImplReplacement = require('./common-replacements').altIndexOfImplReplacement
- , altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement
- , objectKeysDefine =
- require('./common-replacements').objectKeysDefine
- , objectKeysReplacement =
- require('./common-replacements').objectKeysReplacement
- , bufferShimFix =
- require('./common-replacements').bufferShimFix
- , bufferStaticMethods =
- require('./common-replacements').bufferStaticMethods
- , specialForEachReplacment =
- require('./common-replacements').specialForEachReplacment
- , deepStrictEqual = [
- /util\.isDeepStrictEqual/,
- 'require(\'deep-strict-equal\')'
- ]
- , tapOk = [
- /console\.log\('ok'\);/g,
- 'require(\'tap\').pass();'
- ]
- , catchES7 = [
- /} catch {/,
- '} catch(_e) {'
- ]
- , catchES7OpenClose = [
- /} catch {}/,
- '} catch(_e) {}'
- ]
-
-
-module.exports.all = [
- [
- /require\(['"]stream['"]\)/g
- , 'require(\'../../\')'
- ]
-
- // some tests need stream.Stream but readable.js doesn't offer that
- // and we've undone it with the previous replacement
-
- , [
- /stream\.Stream|require\('\.\.\/\.\.\/'\)\.Stream/g
- , 'require(\'stream\').Stream'
- ]
-
- , [
- /require\(['"](_stream_\w+)['"]\)/g
- , 'require(\'../../lib/$1\')'
- ]
-
- , [
- /Stream.(Readable|Writable|Duplex|Transform|PassThrough)/g
- , 'require(\'../../\').$1'
- ]
- , bufferShimFix
- , bufferStaticMethods
- , [
- /require\(['"]assert['"]\)/g
- , 'require(\'assert/\')'
- ]
- , [
- /\/\/ Flags: .*/
- , ''
- ]
-]
-
-module.exports['test-stream2-basic.js'] = [
- altForEachImplReplacement
- , specialForEachReplacment
-]
-
-module.exports['test-stream2-objects.js'] = [
- altForEachImplReplacement
- , altForEachUseReplacement
-]
-
-module.exports['test-stream2-transform.js'] = [
- altForEachImplReplacement
- , altForEachUseReplacement
-]
-
-module.exports['test-stream2-writable.js'] = [
- altForEachImplReplacement
- , altForEachUseReplacement
- , [
- /'latin1',/g,
- '\'binary\','
- ]
-]
-
-module.exports['test-stream-big-packet.js'] = [
- altIndexOfImplReplacement
- , altIndexOfUseReplacement
-]
-
-module.exports['test-stream-end-paused.js'] = [
- [
- /console.log\('ok'\);/,
- ''
- ]
-]
-
-module.exports['common.js'] = [
- objectKeysDefine
- , objectKeysReplacement
- , altForEachImplReplacement
- , altForEachUseReplacement
- , deepStrictEqual
- , catchES7
- , catchES7OpenClose
- , [
- /require\('module'\)\.builtinModules\.includes\('worker_threads'\)/,
- 'false'
- ]
- , [
- /process.argv.length === 2/,
- 'false'
- ]
- , [
- /^( for \(var x in global\) \{|function leakedGlobals\(\) \{)$/m
- , ' /**/\n'
- + ' if (typeof constructor == \'function\')\n'
- + ' knownGlobals.push(constructor);\n'
- + ' if (typeof DTRACE_NET_SOCKET_READ == \'function\')\n'
- + ' knownGlobals.push(DTRACE_NET_SOCKET_READ);\n'
- + ' if (typeof DTRACE_NET_SOCKET_WRITE == \'function\')\n'
- + ' knownGlobals.push(DTRACE_NET_SOCKET_WRITE);\n'
- + ' if (global.__coverage__)\n'
- + ' knownGlobals.push(__coverage__);\n'
- + '\'console,clearImmediate,setImmediate,core,__core-js_shared__,Promise,Map,Set,WeakMap,WeakSet,Reflect,System,queueMicrotask,asap,Observable,regeneratorRuntime,_babelPolyfill\'.split(\',\').filter(function (item) { return typeof global[item] !== undefined}).forEach(function (item) {knownGlobals.push(global[item])})'
- + ' /**/\n\n$1'
- ]
-
- , [
- /(exports.mustCall[\s\S]*)/m
- , '$1\n'
- + 'if (!util._errnoException) {\n'
- + ' var uv;\n'
- + ' util._errnoException = function(err, syscall) {\n'
- + ' if (util.isUndefined(uv)) try { uv = process.binding(\'uv\'); } catch (e) {}\n'
- + ' var errname = uv ? uv.errname(err) : \'\';\n'
- + ' var e = new Error(syscall + \' \' + errname);\n'
- + ' e.code = errname;\n'
- + ' e.errno = errname;\n'
- + ' e.syscall = syscall;\n'
- + ' return e;\n'
- + ' };\n'
- + '}\n'
- ]
-
- , [
- /^if \(global\.ArrayBuffer\) \{([^\}]+)\}$/m
- , '/**/if (!process.browser) {'
- + '\nif \(global\.ArrayBuffer\) {$1}\n'
- + '}/**/\n'
- ]
- , [
- /^Object\.defineProperty\(([\w\W]+?)\}\)\;/mg
- , '/**/if (!process.browser) {'
- + '\nObject\.defineProperty($1});\n'
- + '}/**/\n'
- ]
- , [
- /if \(!process\.send\)/
- , 'if (!process.send && !process.browser)'
- ]
- , [
- /^/,
- `/**/
- require('@babel/polyfill');
- var util = require('util');
- for (var i in util) exports[i] = util[i];
- /**/`
- ],
- [
- /var regexp = `\^\(\\\\w\+\)\\\\s\+\\\\s\$\{port\}\/\$\{protocol\}\\\\s`;/,
- `var regexp = '^(\\w+)\\s+\\s' + port + '/' + protocol + '\\s';`
- ],
- [
- /require\(['"]stream['"]\)/g
- , 'require(\'../../\')'
- ],
- [
- /^var util = require\('util'\);/m
- , '\n/**/\nvar util = require(\'core-util-is\');\n'
- + 'util.inherits = require(\'inherits\');\n/**/\n'
- ],
- [
- /^const util = require\('util'\);/m
-, '\n/**/\nvar util = require(\'core-util-is\');\n'
- + 'util.inherits = require(\'inherits\');\n/**/\n'
-]
-, [
- /process\.binding\('timer_wrap'\)\.Timer;/,
- '{now: function (){}}'
-],
-[
- /(exports\.enoughTestCpu[^;]+;)/,
- '/*$1*/'
-],
-[
- /exports\.buildType/,
- '//exports.buildType'
-],
-[
- /require\('async_hooks'\)/,
- '/*require(\'async_hooks\')'
-],
-[
- /\}\).enable\(\);/,
- '}).enable();*/'
-],
-[
- /const async_hooks = require\('async_hooks'\)/,
- 'var async_hooks = require(\'async_\' + \'hooks\')'
-],
-[
- /(?:var|const) async_wrap = process\.binding\('async_wrap'\);\n.*(?:var|const) (?:{ )?kCheck(?: })? = async_wrap\.constants(?:\.kCheck)?;/gm,
- '// const async_wrap = process.binding(\'async_wrap\');\n' +
- ' // const kCheck = async_wrap.constants.kCheck;'
-],
-[
- /async_wrap\.async_hook_fields\[kCheck\] \+= 1;/,
- '// async_wrap.async_hook_fields[kCheck] += 1;'
-],
-[
- /os\.cpus\(\)/,
- 'os.cpus().length === 0 ? [{ speed: 1000 }] : os.cpus()'
-],
-[
- /const buildType = process.config.target_defaults.default_configuration;/,
- 'const buildType = \'readable-stream\';'
-],
-[
- /const hasCrypto = Boolean\(process.versions.openssl\);/,
- 'const hasCrypto = true;'
-]
-]
-
-// this test has some trouble with the nextTick depth when run
-// to stdout, it's also very noisy so we'll quiet it
-module.exports['test-stream-pipe-multi.js'] = [
- altForEachImplReplacement
- , altForEachUseReplacement
- , [
- /console\.error/g
- , '//console.error'
- ]
-
- , [
- /process\.nextTick/g
- , 'setImmediate'
- ]
-]
-
-// just noisy
-module.exports['test-stream2-large-read-stall.js'] = [
- [
- /console\.error/g
- , ';false && console.error'
- ]
-]
-
-module.exports['test-stream-pipe-cleanup.js'] = [
- [
- /(function Writable\(\) \{)/
- , '(function (){\nif (/^v0\\.8\\./.test(process.version))\n return\n\n$1'
- ]
- ,
- [
- /$/
- ,'}())'
- ]
-]
-
-module.exports['test-stream2-stderr-sync.js'] = [
- altForEachImplReplacement
- , altForEachUseReplacement
- , [
- // 'tty_wrap' is too different across node versions.
- // this bypasses it and replicates a console.error() test
- /(function child0\(\) \{)/
- , '$1\n'
- + ' return console.error(\'child 0\\nfoo\\nbar\\nbaz\');\n'
- ]
-]
-
-module.exports['test-stream-unshift-read-race.js'] = [
- [
- /data\.slice\(pos, pos \+ n\)/g,
- 'data.slice(pos, Math.min(pos + n, data.length))'
- ]
-]
-
-module.exports['test-stream-pipe-without-listenerCount.js'] = [
- [
- /require\(\'stream\'\)/g,
- 'stream'
- ]
-]
-
-module.exports['test-stream2-unpipe-drain.js'] = [
- [
- /^/,
- `(function () {\n`
- ],
- [
- /$/
- ,'}())'
- ]
-]
-
-module.exports['test-stream2-decode-partial.js'] = [
- [
- /readable\.push\(source\.slice\(4, 6\)\)/
- ,`readable.push(source.slice(4, source.length));`
- ]
-]
-
-
-module.exports['test-stream3-cork-uncork.js'] = module.exports['test-stream3-cork-end.js'] = [
- [
- /assert\.ok\(seen\.equals\(expected\)\);/,
- 'assert.deepEqual(seen, expected);'
- ]
-]
-module.exports['test-stream2-readable-from-list.js'] = [
- [
- /require\('internal\/streams\/buffer_list'\)/,
- 'require(\'../../lib/internal/streams/buffer_list\')'
- ],
- [
- /assert\.strictEqual\(\n *util.inspect\(\[ list \], \{ compact: false \}\),\n *`\[\n *BufferList \{\n *head: \[Object\],\n *tail: \[Object\],\n *length: 4\n *\}\n *\]`\);/m,
- 'assert.strictEqual(util.inspect([ list ], { compact: false }).indexOf(\'BufferList\') > 0, true)'
- ]
-]
-module.exports['test-stream-writev.js'] = [
- tapOk,
- [
- /console.log\(`# decode=/,
- 'require(\'tap\').test(`# decode='
- ]
-]
-
-module.exports['test-stream3-pause-then-read.js'] = [
- tapOk
-]
-
-module.exports['test-stream-unshift-read-race.js'] = [
- tapOk
-]
-
-module.exports['test-stream2-unpipe-leak.js'] = [
- tapOk
-]
-
-module.exports['test-stream2-compatibility.js'] = [
- tapOk
-]
-
-module.exports['test-stream-push-strings.js'] = [
- tapOk
-]
-
-module.exports['test-stream-unshift-empty-chunk.js'] = [
- tapOk
-]
-
-module.exports['test-stream2-pipe-error-once-listener.js'] = [
- tapOk
-]
-
-module.exports['test-stream-push-order.js'] = [
- tapOk
-]
-
-module.exports['test-stream2-push.js'] = [
- tapOk
-]
-
-module.exports['test-stream2-readable-empty-buffer-no-eof.js'] = [
- tapOk,
- [
- /case 3:\n(\s+)setImmediate\(r\.read\.bind\(r, 0\)\);/,
- 'case 3:\n$1setTimeout(r.read.bind(r, 0), 50);'
- ]
-]
-module.exports['test-stream-buffer-list.js'] = [
- [
- /require\('internal\/streams\/buffer_list'\);/,
- 'require(\'../../lib/internal/streams/buffer_list\');'
- ]
-]
-
-module.exports['test-stream-transform-constructor-set-methods.js'] = [
- [
- /Error: _transform\\\(\\\) is n/,
- 'Error: .*[Nn]'
- ]
-]
-
-module.exports['test-stream-unpipe-event.js'] = [
- [
- /^/,
- 'if (process.version.indexOf(\'v0.8\') === 0) { process.exit(0) }\n'
- ]
-]
-
-module.exports['test-stream-readable-flow-recursion.js'] = [
- tapOk,
- deepStrictEqual
-]
-
-module.exports['test-stream-readable-with-unimplemented-_read.js'] = [
- deepStrictEqual
-]
-
-module.exports['test-stream-writable-needdrain-state.js'] = [
- deepStrictEqual
-]
-
-module.exports['test-stream-readable-setEncoding-null.js'] = [
- deepStrictEqual
-]
-
-module.exports['test-stream-pipeline.js'] = [
- [
- /require\('http2'\)/g,
- '{ createServer() { return { listen() {} } } }'
- ],
- [
- /assert\.deepStrictEqual\(err, new Error\('kaboom'\)\);/g,
- 'assert.strictEqual(err.message, \'kaboom\');'
- ],
- [
- /cb\(new Error\('kaboom'\)\)/g,
- 'process.nextTick(cb, new Error(\'kaboom\'))'
- ],
- [
- /const \{ promisify \} = require\('util'\);/g,
- 'const promisify = require(\'util-promisify\');'
- ]
-]
-
-module.exports['test-stream-finished.js'] = [
- [
- /const \{ promisify \} = require\('util'\);/g,
- 'const promisify = require(\'util-promisify\');'
- ]
-]
-
-module.exports['test-stream-readable-async-iterators.js'] = [
- [
- /assert.rejects\(/g,
- '(function(f, e) { let success = false; f().then(function() { success = true; throw new Error(\'should not succeed\') }).catch(function(e2) { if (success) { throw e2; } assert.strictEqual(e.message, e2.message); })})('
- ],
- [
- /tests\(\).then\(common\.mustCall\(\)\)/,
- 'tests().then(common.mustCall(), common.mustNotCall(console.log))'
- ],
- [
- /const AsyncIteratorPrototype = Object\.getPrototypeOf\(\n.*Object\.getPrototypeOf\(async function\* \(\) \{\}\).prototype\);/m,
- 'const AsyncIteratorPrototype = Object\.getPrototypeOf(function () {})'
- ]
-]
-
-module.exports['test-readable-from.js'] = [
- [
- /const \{ once \} = require\('events'\);/
- , 'const once = require(\'events.once\');'
- ]
-]
diff --git a/c8.json b/c8.json
new file mode 100644
index 0000000000..ea07a2272a
--- /dev/null
+++ b/c8.json
@@ -0,0 +1,9 @@
+{
+ "include": ["lib"],
+ "reporter": ["text", "html"],
+ "check-coverage": true,
+ "branches": 50,
+ "functions": 50,
+ "lines": 50,
+ "statements": 50
+}
\ No newline at end of file
diff --git a/doc/wg-meetings/2015-01-30.md b/doc/wg-meetings/2015-01-30.md
deleted file mode 100644
index 83275f192e..0000000000
--- a/doc/wg-meetings/2015-01-30.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# streams WG Meeting 2015-01-30
-
-## Links
-
-* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
-* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
-* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
-
-## Agenda
-
-Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
-
-* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
-* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
-* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
-* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
-
-## Minutes
-
-### adopt a charter
-
-* group: +1's all around
-
-### What versioning scheme should be adopted?
-* group: +1’s 3.0.0
-* domenic+group: pulling in patches from other sources where appropriate
-* mikeal: version independently, suggesting versions for io.js
-* mikeal+domenic: work with TC to notify in advance of changes
-simpler stream creation
-
-### streamline creation of streams
-* sam: streamline creation of streams
-* domenic: nice simple solution posted
- but, we lose the opportunity to change the model
- may not be backwards incompatible (double check keys)
-
- **action item:** domenic will check
-
-### remove implicit flowing of streams on(‘data’)
-* add isFlowing / isPaused
-* mikeal: worrying that we’re documenting polyfill methods – confuses users
-* domenic: more reflective API is probably good, with warning labels for users
-* new section for mad scientists (reflective stream access)
-* calvin: name the “third state”
-* mikeal: maybe borrow the name from whatwg?
-* domenic: we’re missing the “third state”
-* consensus: kind of difficult to name the third state
-* mikeal: figure out differences in states / compat
-* mathias: always flow on data – eliminates third state
- * explore what it breaks
-
-**action items:**
-* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
-* ask rod/build for infrastructure
-* **chris**: explore the “flow on data” approach
-* add isPaused/isFlowing
-* add new docs section
-* move isPaused to that section
-
-
diff --git a/errors-browser.js b/errors-browser.js
deleted file mode 100644
index fb8e73e189..0000000000
--- a/errors-browser.js
+++ /dev/null
@@ -1,127 +0,0 @@
-'use strict';
-
-function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
-
-var codes = {};
-
-function createErrorType(code, message, Base) {
- if (!Base) {
- Base = Error;
- }
-
- function getMessage(arg1, arg2, arg3) {
- if (typeof message === 'string') {
- return message;
- } else {
- return message(arg1, arg2, arg3);
- }
- }
-
- var NodeError =
- /*#__PURE__*/
- function (_Base) {
- _inheritsLoose(NodeError, _Base);
-
- function NodeError(arg1, arg2, arg3) {
- return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
- }
-
- return NodeError;
- }(Base);
-
- NodeError.prototype.name = Base.name;
- NodeError.prototype.code = code;
- codes[code] = NodeError;
-} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
-
-
-function oneOf(expected, thing) {
- if (Array.isArray(expected)) {
- var len = expected.length;
- expected = expected.map(function (i) {
- return String(i);
- });
-
- if (len > 2) {
- return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
- } else if (len === 2) {
- return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
- } else {
- return "of ".concat(thing, " ").concat(expected[0]);
- }
- } else {
- return "of ".concat(thing, " ").concat(String(expected));
- }
-} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
-
-
-function startsWith(str, search, pos) {
- return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
-} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
-
-
-function endsWith(str, search, this_len) {
- if (this_len === undefined || this_len > str.length) {
- this_len = str.length;
- }
-
- return str.substring(this_len - search.length, this_len) === search;
-} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
-
-
-function includes(str, search, start) {
- if (typeof start !== 'number') {
- start = 0;
- }
-
- if (start + search.length > str.length) {
- return false;
- } else {
- return str.indexOf(search, start) !== -1;
- }
-}
-
-createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
- return 'The value "' + value + '" is invalid for option "' + name + '"';
-}, TypeError);
-createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
- // determiner: 'must be' or 'must not be'
- var determiner;
-
- if (typeof expected === 'string' && startsWith(expected, 'not ')) {
- determiner = 'must not be';
- expected = expected.replace(/^not /, '');
- } else {
- determiner = 'must be';
- }
-
- var msg;
-
- if (endsWith(name, ' argument')) {
- // For cases like 'first argument'
- msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
- } else {
- var type = includes(name, '.') ? 'property' : 'argument';
- msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
- }
-
- msg += ". Received type ".concat(typeof actual);
- return msg;
-}, TypeError);
-createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
-createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
- return 'The ' + name + ' method is not implemented';
-});
-createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
-createErrorType('ERR_STREAM_DESTROYED', function (name) {
- return 'Cannot call ' + name + ' after a stream was destroyed';
-});
-createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
-createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
-createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
-createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
-createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
- return 'Unknown encoding: ' + arg;
-}, TypeError);
-createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
-module.exports.codes = codes;
diff --git a/errors.js b/errors.js
deleted file mode 100644
index 8471526d6e..0000000000
--- a/errors.js
+++ /dev/null
@@ -1,116 +0,0 @@
-'use strict';
-
-const codes = {};
-
-function createErrorType(code, message, Base) {
- if (!Base) {
- Base = Error
- }
-
- function getMessage (arg1, arg2, arg3) {
- if (typeof message === 'string') {
- return message
- } else {
- return message(arg1, arg2, arg3)
- }
- }
-
- class NodeError extends Base {
- constructor (arg1, arg2, arg3) {
- super(getMessage(arg1, arg2, arg3));
- }
- }
-
- NodeError.prototype.name = Base.name;
- NodeError.prototype.code = code;
-
- codes[code] = NodeError;
-}
-
-// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
-function oneOf(expected, thing) {
- if (Array.isArray(expected)) {
- const len = expected.length;
- expected = expected.map((i) => String(i));
- if (len > 2) {
- return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
- expected[len - 1];
- } else if (len === 2) {
- return `one of ${thing} ${expected[0]} or ${expected[1]}`;
- } else {
- return `of ${thing} ${expected[0]}`;
- }
- } else {
- return `of ${thing} ${String(expected)}`;
- }
-}
-
-// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
-function startsWith(str, search, pos) {
- return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
-}
-
-// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
-function endsWith(str, search, this_len) {
- if (this_len === undefined || this_len > str.length) {
- this_len = str.length;
- }
- return str.substring(this_len - search.length, this_len) === search;
-}
-
-// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
-function includes(str, search, start) {
- if (typeof start !== 'number') {
- start = 0;
- }
-
- if (start + search.length > str.length) {
- return false;
- } else {
- return str.indexOf(search, start) !== -1;
- }
-}
-
-createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
- return 'The value "' + value + '" is invalid for option "' + name + '"'
-}, TypeError);
-createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
- // determiner: 'must be' or 'must not be'
- let determiner;
- if (typeof expected === 'string' && startsWith(expected, 'not ')) {
- determiner = 'must not be';
- expected = expected.replace(/^not /, '');
- } else {
- determiner = 'must be';
- }
-
- let msg;
- if (endsWith(name, ' argument')) {
- // For cases like 'first argument'
- msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
- } else {
- const type = includes(name, '.') ? 'property' : 'argument';
- msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
- }
-
- msg += `. Received type ${typeof actual}`;
- return msg;
-}, TypeError);
-createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
-createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
- return 'The ' + name + ' method is not implemented'
-});
-createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
-createErrorType('ERR_STREAM_DESTROYED', function (name) {
- return 'Cannot call ' + name + ' after a stream was destroyed';
-});
-createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
-createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
-createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
-createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
-createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
- return 'Unknown encoding: ' + arg
-}, TypeError);
-createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
-
-module.exports.codes = codes;
diff --git a/examples/CAPSLOCKTYPER.JS b/examples/CAPSLOCKTYPER.JS
deleted file mode 100644
index a35b975f4e..0000000000
--- a/examples/CAPSLOCKTYPER.JS
+++ /dev/null
@@ -1,32 +0,0 @@
-var Transform = require('../transform');
-var inherits = require('util').inherits;
-
-// subclass
-function MyStream () {
- Transform.call(this, {
- lowWaterMark: 0,
- encoding: 'utf8'
- });
-}
-inherits(MyStream, Transform);
-
-MyStream.prototype._transform = function (chunk, outputFn, callback) {
- outputFn(Buffer.from(String(chunk).toUpperCase()));
- callback();
-};
-
-// use it!
-var s = new MyStream();
-process.stdin.resume();
-process.stdin.pipe(s).pipe(process.stdout);
-if (process.stdin.setRawMode)
- process.stdin.setRawMode(true);
-process.stdin.on('data', function (c) {
- c = c.toString();
- if (c === '\u0003' || c === '\u0004') {
- process.stdin.pause();
- s.end();
- }
- if (c === '\r')
- process.stdout.write('\n');
-});
diff --git a/examples/capslock-type.cjs b/examples/capslock-type.cjs
new file mode 100644
index 0000000000..f7b48bdbf2
--- /dev/null
+++ b/examples/capslock-type.cjs
@@ -0,0 +1,31 @@
+'use strict'
+
+const { Transform } = require('../lib/ours/index')
+
+class MyStream extends Transform {
+ _transform(chunk, encoding, callback) {
+ callback(null, Buffer.from(chunk, encoding).toString('utf-8').toUpperCase())
+ }
+}
+
+const s = new MyStream()
+
+process.stdin.resume()
+process.stdin.pipe(s).pipe(process.stdout)
+
+if (process.stdin.setRawMode) {
+ process.stdin.setRawMode(true)
+}
+
+process.stdin.on('data', function (c) {
+ c = c.toString()
+
+ if (c === '\u0003' || c === '\u0004') {
+ process.stdin.pause()
+ s.end()
+ }
+
+ if (c === '\r') {
+ process.stdout.write('\n')
+ }
+})
diff --git a/examples/typer.js b/examples/typer.js
deleted file mode 100644
index c16eb6fb00..0000000000
--- a/examples/typer.js
+++ /dev/null
@@ -1,17 +0,0 @@
-var fs = require('fs');
-var fst = fs.createReadStream(__filename);
-var Readable = require('../readable.js');
-var rst = new Readable();
-rst.wrap(fst);
-
-rst.on('end', function() {
- process.stdin.pause();
-});
-
-process.stdin.setRawMode(true);
-process.stdin.on('data', function() {
- var c = rst.read(3);
- if (!c) return setTimeout(process.exit, 500)
- process.stdout.write(c);
-});
-process.stdin.resume();
diff --git a/examples/typer.mjs b/examples/typer.mjs
new file mode 100644
index 0000000000..c063b5d187
--- /dev/null
+++ b/examples/typer.mjs
@@ -0,0 +1,24 @@
+import { createReadStream } from 'node:fs'
+import process from 'node:process'
+import { fileURLToPath } from 'node:url'
+import { Readable } from '../lib/ours/index.js'
+
+const fst = createReadStream(fileURLToPath(new URL(import.meta.url)))
+const rst = new Readable()
+
+rst.wrap(fst)
+
+rst.on('end', function () {
+ process.stdin.pause()
+})
+
+console.log("Every time you press a key, you will see more contents of the source file. Let's begin!\n\n")
+process.stdin.setRawMode(true)
+process.stdin.on('data', function () {
+ const c = rst.read(100)
+ if (!c) {
+ return setTimeout(process.exit, 500)
+ }
+ process.stdout.write(c)
+})
+process.stdin.resume()
diff --git a/experimentalWarning.js b/experimentalWarning.js
deleted file mode 100644
index 78e841495b..0000000000
--- a/experimentalWarning.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-
-var experimentalWarnings = new Set();
-
-function emitExperimentalWarning(feature) {
- if (experimentalWarnings.has(feature)) return;
- var msg = feature + ' is an experimental feature. This feature could ' +
- 'change at any time';
- experimentalWarnings.add(feature);
- process.emitWarning(msg, 'ExperimentalWarning');
-}
-
-function noop() {}
-
-module.exports.emitExperimentalWarning = process.emitWarning
- ? emitExperimentalWarning
- : noop;
diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js
index 6752519225..105eebbd1c 100644
--- a/lib/_stream_duplex.js
+++ b/lib/_stream_duplex.js
@@ -1,139 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// a duplex stream is just a stream that is both readable and writable.
-// Since JS doesn't have multiple prototypal inheritance, this class
-// prototypally inherits from Readable, and then parasitically from
-// Writable.
-'use strict';
-/**/
+'use strict' // Keep this file as an alias for the full stream module.
-var objectKeys = Object.keys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-module.exports = Duplex;
-
-var Readable = require('./_stream_readable');
-
-var Writable = require('./_stream_writable');
-
-require('inherits')(Duplex, Readable);
-
-{
- // Allow the keys array to be GC'ed.
- var keys = objectKeys(Writable.prototype);
-
- for (var v = 0; v < keys.length; v++) {
- var method = keys[v];
- if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
- }
-}
-
-function Duplex(options) {
- if (!(this instanceof Duplex)) return new Duplex(options);
- Readable.call(this, options);
- Writable.call(this, options);
- this.allowHalfOpen = true;
-
- if (options) {
- if (options.readable === false) this.readable = false;
- if (options.writable === false) this.writable = false;
-
- if (options.allowHalfOpen === false) {
- this.allowHalfOpen = false;
- this.once('end', onend);
- }
- }
-}
-
-Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.highWaterMark;
- }
-});
-Object.defineProperty(Duplex.prototype, 'writableBuffer', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState && this._writableState.getBuffer();
- }
-});
-Object.defineProperty(Duplex.prototype, 'writableLength', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.length;
- }
-}); // the no-half-open enforcer
-
-function onend() {
- // If the writable side ended, then we're ok.
- if (this._writableState.ended) return; // no more data can be written.
- // But allow more writes to happen in this tick.
-
- process.nextTick(onEndNT, this);
-}
-
-function onEndNT(self) {
- self.end();
-}
-
-Object.defineProperty(Duplex.prototype, 'destroyed', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- if (this._readableState === undefined || this._writableState === undefined) {
- return false;
- }
-
- return this._readableState.destroyed && this._writableState.destroyed;
- },
- set: function set(value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (this._readableState === undefined || this._writableState === undefined) {
- return;
- } // backward compatibility, the user is explicitly
- // managing destroyed
-
-
- this._readableState.destroyed = value;
- this._writableState.destroyed = value;
- }
-});
\ No newline at end of file
+module.exports = require('./stream').Duplex
diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js
index 32e7414c5a..31358e6d12 100644
--- a/lib/_stream_passthrough.js
+++ b/lib/_stream_passthrough.js
@@ -1,39 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// a passthrough stream.
-// basically just the most minimal sort of Transform stream.
-// Every written chunk gets output as-is.
-'use strict';
+'use strict' // Keep this file as an alias for the full stream module.
-module.exports = PassThrough;
-
-var Transform = require('./_stream_transform');
-
-require('inherits')(PassThrough, Transform);
-
-function PassThrough(options) {
- if (!(this instanceof PassThrough)) return new PassThrough(options);
- Transform.call(this, options);
-}
-
-PassThrough.prototype._transform = function (chunk, encoding, cb) {
- cb(null, chunk);
-};
\ No newline at end of file
+module.exports = require('./stream').PassThrough
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index 192d451488..abd53db4ca 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -1,1124 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-'use strict';
+'use strict' // Keep this file as an alias for the full stream module.
-module.exports = Readable;
-/**/
-
-var Duplex;
-/**/
-
-Readable.ReadableState = ReadableState;
-/**/
-
-var EE = require('events').EventEmitter;
-
-var EElistenerCount = function EElistenerCount(emitter, type) {
- return emitter.listeners(type).length;
-};
-/**/
-
-/**/
-
-
-var Stream = require('./internal/streams/stream');
-/**/
-
-
-var Buffer = require('buffer').Buffer;
-
-var OurUint8Array = global.Uint8Array || function () {};
-
-function _uint8ArrayToBuffer(chunk) {
- return Buffer.from(chunk);
-}
-
-function _isUint8Array(obj) {
- return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-/**/
-
-
-var debugUtil = require('util');
-
-var debug;
-
-if (debugUtil && debugUtil.debuglog) {
- debug = debugUtil.debuglog('stream');
-} else {
- debug = function debug() {};
-}
-/**/
-
-
-var BufferList = require('./internal/streams/buffer_list');
-
-var destroyImpl = require('./internal/streams/destroy');
-
-var _require = require('./internal/streams/state'),
- getHighWaterMark = _require.getHighWaterMark;
-
-var _require$codes = require('../errors').codes,
- ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
- ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,
- ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
- ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.
-
-
-var StringDecoder;
-var createReadableStreamAsyncIterator;
-var from;
-
-require('inherits')(Readable, Stream);
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
-
-function prependListener(emitter, event, fn) {
- // Sadly this is not cacheable as some libraries bundle their own
- // event emitter implementation with them.
- if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
- // userland ones. NEVER DO THIS. This is here only because this code needs
- // to continue to work with older versions of Node.js that do not include
- // the prependListener() method. The goal is to eventually remove this hack.
-
- if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
-}
-
-function ReadableState(options, stream, isDuplex) {
- Duplex = Duplex || require('./_stream_duplex');
- options = options || {}; // Duplex streams are both readable and writable, but share
- // the same options object.
- // However, some cases require setting options to different
- // values for the readable and the writable sides of the duplex stream.
- // These options can be provided separately as readableXXX and writableXXX.
-
- if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
- // make all the buffer merging and length checks go away
-
- this.objectMode = !!options.objectMode;
- if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
- // Note: 0 is a valid value, means "don't call _read preemptively ever"
-
- this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
- // linked list can remove elements from the beginning faster than
- // array.shift()
-
- this.buffer = new BufferList();
- this.length = 0;
- this.pipes = null;
- this.pipesCount = 0;
- this.flowing = null;
- this.ended = false;
- this.endEmitted = false;
- this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
- // immediately, or on a later tick. We set this to true at first, because
- // any actions that shouldn't happen until "later" should generally also
- // not happen before the first read call.
-
- this.sync = true; // whenever we return null, then we set a flag to say
- // that we're awaiting a 'readable' event emission.
-
- this.needReadable = false;
- this.emittedReadable = false;
- this.readableListening = false;
- this.resumeScheduled = false;
- this.paused = true; // Should close be emitted on destroy. Defaults to true.
-
- this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')
-
- this.autoDestroy = !!options.autoDestroy; // has it been destroyed
-
- this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
- // encoding is 'binary' so we have to make this configurable.
- // Everything else in the universe uses 'utf8', though.
-
- this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
-
- this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
-
- this.readingMore = false;
- this.decoder = null;
- this.encoding = null;
-
- if (options.encoding) {
- if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
- this.decoder = new StringDecoder(options.encoding);
- this.encoding = options.encoding;
- }
-}
-
-function Readable(options) {
- Duplex = Duplex || require('./_stream_duplex');
- if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
- // the ReadableState constructor, at least with V8 6.5
-
- var isDuplex = this instanceof Duplex;
- this._readableState = new ReadableState(options, this, isDuplex); // legacy
-
- this.readable = true;
-
- if (options) {
- if (typeof options.read === 'function') this._read = options.read;
- if (typeof options.destroy === 'function') this._destroy = options.destroy;
- }
-
- Stream.call(this);
-}
-
-Object.defineProperty(Readable.prototype, 'destroyed', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- if (this._readableState === undefined) {
- return false;
- }
-
- return this._readableState.destroyed;
- },
- set: function set(value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (!this._readableState) {
- return;
- } // backward compatibility, the user is explicitly
- // managing destroyed
-
-
- this._readableState.destroyed = value;
- }
-});
-Readable.prototype.destroy = destroyImpl.destroy;
-Readable.prototype._undestroy = destroyImpl.undestroy;
-
-Readable.prototype._destroy = function (err, cb) {
- cb(err);
-}; // Manually shove something into the read() buffer.
-// This returns true if the highWaterMark has not been hit yet,
-// similar to how Writable.write() returns true if you should
-// write() some more.
-
-
-Readable.prototype.push = function (chunk, encoding) {
- var state = this._readableState;
- var skipChunkCheck;
-
- if (!state.objectMode) {
- if (typeof chunk === 'string') {
- encoding = encoding || state.defaultEncoding;
-
- if (encoding !== state.encoding) {
- chunk = Buffer.from(chunk, encoding);
- encoding = '';
- }
-
- skipChunkCheck = true;
- }
- } else {
- skipChunkCheck = true;
- }
-
- return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-}; // Unshift should *always* be something directly out of read()
-
-
-Readable.prototype.unshift = function (chunk) {
- return readableAddChunk(this, chunk, null, true, false);
-};
-
-function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
- debug('readableAddChunk', chunk);
- var state = stream._readableState;
-
- if (chunk === null) {
- state.reading = false;
- onEofChunk(stream, state);
- } else {
- var er;
- if (!skipChunkCheck) er = chunkInvalid(state, chunk);
-
- if (er) {
- errorOrDestroy(stream, er);
- } else if (state.objectMode || chunk && chunk.length > 0) {
- if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
- chunk = _uint8ArrayToBuffer(chunk);
- }
-
- if (addToFront) {
- if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
- } else if (state.ended) {
- errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());
- } else if (state.destroyed) {
- return false;
- } else {
- state.reading = false;
-
- if (state.decoder && !encoding) {
- chunk = state.decoder.write(chunk);
- if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
- } else {
- addChunk(stream, state, chunk, false);
- }
- }
- } else if (!addToFront) {
- state.reading = false;
- maybeReadMore(stream, state);
- }
- } // We can push more data if we are below the highWaterMark.
- // Also, if we have no data yet, we can stand some more bytes.
- // This is to work around cases where hwm=0, such as the repl.
-
-
- return !state.ended && (state.length < state.highWaterMark || state.length === 0);
-}
-
-function addChunk(stream, state, chunk, addToFront) {
- if (state.flowing && state.length === 0 && !state.sync) {
- state.awaitDrain = 0;
- stream.emit('data', chunk);
- } else {
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
- if (state.needReadable) emitReadable(stream);
- }
-
- maybeReadMore(stream, state);
-}
-
-function chunkInvalid(state, chunk) {
- var er;
-
- if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
- er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
- }
-
- return er;
-}
-
-Readable.prototype.isPaused = function () {
- return this._readableState.flowing === false;
-}; // backwards compatibility.
-
-
-Readable.prototype.setEncoding = function (enc) {
- if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
- var decoder = new StringDecoder(enc);
- this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8
-
- this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:
-
- var p = this._readableState.buffer.head;
- var content = '';
-
- while (p !== null) {
- content += decoder.write(p.data);
- p = p.next;
- }
-
- this._readableState.buffer.clear();
-
- if (content !== '') this._readableState.buffer.push(content);
- this._readableState.length = content.length;
- return this;
-}; // Don't raise the hwm > 1GB
-
-
-var MAX_HWM = 0x40000000;
-
-function computeNewHighWaterMark(n) {
- if (n >= MAX_HWM) {
- // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
- n = MAX_HWM;
- } else {
- // Get the next highest power of 2 to prevent increasing hwm excessively in
- // tiny amounts
- n--;
- n |= n >>> 1;
- n |= n >>> 2;
- n |= n >>> 4;
- n |= n >>> 8;
- n |= n >>> 16;
- n++;
- }
-
- return n;
-} // This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-
-
-function howMuchToRead(n, state) {
- if (n <= 0 || state.length === 0 && state.ended) return 0;
- if (state.objectMode) return 1;
-
- if (n !== n) {
- // Only flow one buffer at a time
- if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
- } // If we're asking for more than the current hwm, then raise the hwm.
-
-
- if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
- if (n <= state.length) return n; // Don't have enough
-
- if (!state.ended) {
- state.needReadable = true;
- return 0;
- }
-
- return state.length;
-} // you can override either this method, or the async _read(n) below.
-
-
-Readable.prototype.read = function (n) {
- debug('read', n);
- n = parseInt(n, 10);
- var state = this._readableState;
- var nOrig = n;
- if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
- // already have a bunch of data in the buffer, then just trigger
- // the 'readable' event and move on.
-
- if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
- debug('read: emitReadable', state.length, state.ended);
- if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
- return null;
- }
-
- n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
-
- if (n === 0 && state.ended) {
- if (state.length === 0) endReadable(this);
- return null;
- } // All the actual chunk generation logic needs to be
- // *below* the call to _read. The reason is that in certain
- // synthetic stream cases, such as passthrough streams, _read
- // may be a completely synchronous operation which may change
- // the state of the read buffer, providing enough data when
- // before there was *not* enough.
- //
- // So, the steps are:
- // 1. Figure out what the state of things will be after we do
- // a read from the buffer.
- //
- // 2. If that resulting state will trigger a _read, then call _read.
- // Note that this may be asynchronous, or synchronous. Yes, it is
- // deeply ugly to write APIs this way, but that still doesn't mean
- // that the Readable class should behave improperly, as streams are
- // designed to be sync/async agnostic.
- // Take note if the _read call is sync or async (ie, if the read call
- // has returned yet), so that we know whether or not it's safe to emit
- // 'readable' etc.
- //
- // 3. Actually pull the requested chunks out of the buffer and return.
- // if we need a readable event, then we need to do some reading.
-
-
- var doRead = state.needReadable;
- debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
-
- if (state.length === 0 || state.length - n < state.highWaterMark) {
- doRead = true;
- debug('length less than watermark', doRead);
- } // however, if we've ended, then there's no point, and if we're already
- // reading, then it's unnecessary.
-
-
- if (state.ended || state.reading) {
- doRead = false;
- debug('reading or ended', doRead);
- } else if (doRead) {
- debug('do read');
- state.reading = true;
- state.sync = true; // if the length is currently zero, then we *need* a readable event.
-
- if (state.length === 0) state.needReadable = true; // call internal read method
-
- this._read(state.highWaterMark);
-
- state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
- // and we need to re-evaluate how much data we can return to the user.
-
- if (!state.reading) n = howMuchToRead(nOrig, state);
- }
-
- var ret;
- if (n > 0) ret = fromList(n, state);else ret = null;
-
- if (ret === null) {
- state.needReadable = state.length <= state.highWaterMark;
- n = 0;
- } else {
- state.length -= n;
- state.awaitDrain = 0;
- }
-
- if (state.length === 0) {
- // If we have nothing in the buffer, then we want to know
- // as soon as we *do* get something into the buffer.
- if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
-
- if (nOrig !== n && state.ended) endReadable(this);
- }
-
- if (ret !== null) this.emit('data', ret);
- return ret;
-};
-
-function onEofChunk(stream, state) {
- debug('onEofChunk');
- if (state.ended) return;
-
- if (state.decoder) {
- var chunk = state.decoder.end();
-
- if (chunk && chunk.length) {
- state.buffer.push(chunk);
- state.length += state.objectMode ? 1 : chunk.length;
- }
- }
-
- state.ended = true;
-
- if (state.sync) {
- // if we are sync, wait until next tick to emit the data.
- // Otherwise we risk emitting data in the flow()
- // the readable code triggers during a read() call
- emitReadable(stream);
- } else {
- // emit 'readable' now to make sure it gets picked up.
- state.needReadable = false;
-
- if (!state.emittedReadable) {
- state.emittedReadable = true;
- emitReadable_(stream);
- }
- }
-} // Don't emit readable right away in sync mode, because this can trigger
-// another read() call => stack overflow. This way, it might trigger
-// a nextTick recursion warning, but that's not so bad.
-
-
-function emitReadable(stream) {
- var state = stream._readableState;
- debug('emitReadable', state.needReadable, state.emittedReadable);
- state.needReadable = false;
-
- if (!state.emittedReadable) {
- debug('emitReadable', state.flowing);
- state.emittedReadable = true;
- process.nextTick(emitReadable_, stream);
- }
-}
-
-function emitReadable_(stream) {
- var state = stream._readableState;
- debug('emitReadable_', state.destroyed, state.length, state.ended);
-
- if (!state.destroyed && (state.length || state.ended)) {
- stream.emit('readable');
- state.emittedReadable = false;
- } // The stream needs another readable event if
- // 1. It is not flowing, as the flow mechanism will take
- // care of it.
- // 2. It is not ended.
- // 3. It is below the highWaterMark, so we can schedule
- // another readable later.
-
-
- state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
- flow(stream);
-} // at this point, the user has presumably seen the 'readable' event,
-// and called read() to consume some data. that may have triggered
-// in turn another _read(n) call, in which case reading = true if
-// it's in progress.
-// However, if we're not ended, or reading, and the length < hwm,
-// then go ahead and try to read some more preemptively.
-
-
-function maybeReadMore(stream, state) {
- if (!state.readingMore) {
- state.readingMore = true;
- process.nextTick(maybeReadMore_, stream, state);
- }
-}
-
-function maybeReadMore_(stream, state) {
- // Attempt to read more data if we should.
- //
- // The conditions for reading more data are (one of):
- // - Not enough data buffered (state.length < state.highWaterMark). The loop
- // is responsible for filling the buffer with enough data if such data
- // is available. If highWaterMark is 0 and we are not in the flowing mode
- // we should _not_ attempt to buffer any extra data. We'll get more data
- // when the stream consumer calls read() instead.
- // - No data in the buffer, and the stream is in flowing mode. In this mode
- // the loop below is responsible for ensuring read() is called. Failing to
- // call read here would abort the flow and there's no other mechanism for
- // continuing the flow if the stream consumer has just subscribed to the
- // 'data' event.
- //
- // In addition to the above conditions to keep reading data, the following
- // conditions prevent the data from being read:
- // - The stream has ended (state.ended).
- // - There is already a pending 'read' operation (state.reading). This is a
- // case where the the stream has called the implementation defined _read()
- // method, but they are processing the call asynchronously and have _not_
- // called push() with new data. In this case we skip performing more
- // read()s. The execution ends in this method again after the _read() ends
- // up calling push() with more data.
- while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
- var len = state.length;
- debug('maybeReadMore read 0');
- stream.read(0);
- if (len === state.length) // didn't get any data, stop spinning.
- break;
- }
-
- state.readingMore = false;
-} // abstract method. to be overridden in specific implementation classes.
-// call cb(er, data) where data is <= n in length.
-// for virtual (non-string, non-buffer) streams, "length" is somewhat
-// arbitrary, and perhaps not very meaningful.
-
-
-Readable.prototype._read = function (n) {
- errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
-};
-
-Readable.prototype.pipe = function (dest, pipeOpts) {
- var src = this;
- var state = this._readableState;
-
- switch (state.pipesCount) {
- case 0:
- state.pipes = dest;
- break;
-
- case 1:
- state.pipes = [state.pipes, dest];
- break;
-
- default:
- state.pipes.push(dest);
- break;
- }
-
- state.pipesCount += 1;
- debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
- var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
- var endFn = doEnd ? onend : unpipe;
- if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
- dest.on('unpipe', onunpipe);
-
- function onunpipe(readable, unpipeInfo) {
- debug('onunpipe');
-
- if (readable === src) {
- if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
- unpipeInfo.hasUnpiped = true;
- cleanup();
- }
- }
- }
-
- function onend() {
- debug('onend');
- dest.end();
- } // when the dest drains, it reduces the awaitDrain counter
- // on the source. This would be more elegant with a .once()
- // handler in flow(), but adding and removing repeatedly is
- // too slow.
-
-
- var ondrain = pipeOnDrain(src);
- dest.on('drain', ondrain);
- var cleanedUp = false;
-
- function cleanup() {
- debug('cleanup'); // cleanup event handlers once the pipe is broken
-
- dest.removeListener('close', onclose);
- dest.removeListener('finish', onfinish);
- dest.removeListener('drain', ondrain);
- dest.removeListener('error', onerror);
- dest.removeListener('unpipe', onunpipe);
- src.removeListener('end', onend);
- src.removeListener('end', unpipe);
- src.removeListener('data', ondata);
- cleanedUp = true; // if the reader is waiting for a drain event from this
- // specific writer, then it would cause it to never start
- // flowing again.
- // So, if this is awaiting a drain, then we just call it now.
- // If we don't know, then assume that we are waiting for one.
-
- if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
- }
-
- src.on('data', ondata);
-
- function ondata(chunk) {
- debug('ondata');
- var ret = dest.write(chunk);
- debug('dest.write', ret);
-
- if (ret === false) {
- // If the user unpiped during `dest.write()`, it is possible
- // to get stuck in a permanently paused state if that write
- // also returned false.
- // => Check whether `dest` is still a piping destination.
- if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
- debug('false write response, pause', state.awaitDrain);
- state.awaitDrain++;
- }
-
- src.pause();
- }
- } // if the dest has an error, then stop piping into it.
- // however, don't suppress the throwing behavior for this.
-
-
- function onerror(er) {
- debug('onerror', er);
- unpipe();
- dest.removeListener('error', onerror);
- if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
- } // Make sure our error handler is attached before userland ones.
-
-
- prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
-
- function onclose() {
- dest.removeListener('finish', onfinish);
- unpipe();
- }
-
- dest.once('close', onclose);
-
- function onfinish() {
- debug('onfinish');
- dest.removeListener('close', onclose);
- unpipe();
- }
-
- dest.once('finish', onfinish);
-
- function unpipe() {
- debug('unpipe');
- src.unpipe(dest);
- } // tell the dest that it's being piped to
-
-
- dest.emit('pipe', src); // start the flow if it hasn't been started already.
-
- if (!state.flowing) {
- debug('pipe resume');
- src.resume();
- }
-
- return dest;
-};
-
-function pipeOnDrain(src) {
- return function pipeOnDrainFunctionResult() {
- var state = src._readableState;
- debug('pipeOnDrain', state.awaitDrain);
- if (state.awaitDrain) state.awaitDrain--;
-
- if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
- state.flowing = true;
- flow(src);
- }
- };
-}
-
-Readable.prototype.unpipe = function (dest) {
- var state = this._readableState;
- var unpipeInfo = {
- hasUnpiped: false
- }; // if we're not piping anywhere, then do nothing.
-
- if (state.pipesCount === 0) return this; // just one destination. most common case.
-
- if (state.pipesCount === 1) {
- // passed in one, but it's not the right one.
- if (dest && dest !== state.pipes) return this;
- if (!dest) dest = state.pipes; // got a match.
-
- state.pipes = null;
- state.pipesCount = 0;
- state.flowing = false;
- if (dest) dest.emit('unpipe', this, unpipeInfo);
- return this;
- } // slow case. multiple pipe destinations.
-
-
- if (!dest) {
- // remove all.
- var dests = state.pipes;
- var len = state.pipesCount;
- state.pipes = null;
- state.pipesCount = 0;
- state.flowing = false;
-
- for (var i = 0; i < len; i++) {
- dests[i].emit('unpipe', this, {
- hasUnpiped: false
- });
- }
-
- return this;
- } // try to find the right one.
-
-
- var index = indexOf(state.pipes, dest);
- if (index === -1) return this;
- state.pipes.splice(index, 1);
- state.pipesCount -= 1;
- if (state.pipesCount === 1) state.pipes = state.pipes[0];
- dest.emit('unpipe', this, unpipeInfo);
- return this;
-}; // set up data events if they are asked for
-// Ensure readable listeners eventually get something
-
-
-Readable.prototype.on = function (ev, fn) {
- var res = Stream.prototype.on.call(this, ev, fn);
- var state = this._readableState;
-
- if (ev === 'data') {
- // update readableListening so that resume() may be a no-op
- // a few lines down. This is needed to support once('readable').
- state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
-
- if (state.flowing !== false) this.resume();
- } else if (ev === 'readable') {
- if (!state.endEmitted && !state.readableListening) {
- state.readableListening = state.needReadable = true;
- state.flowing = false;
- state.emittedReadable = false;
- debug('on readable', state.length, state.reading);
-
- if (state.length) {
- emitReadable(this);
- } else if (!state.reading) {
- process.nextTick(nReadingNextTick, this);
- }
- }
- }
-
- return res;
-};
-
-Readable.prototype.addListener = Readable.prototype.on;
-
-Readable.prototype.removeListener = function (ev, fn) {
- var res = Stream.prototype.removeListener.call(this, ev, fn);
-
- if (ev === 'readable') {
- // We need to check if there is someone still listening to
- // readable and reset the state. However this needs to happen
- // after readable has been emitted but before I/O (nextTick) to
- // support once('readable', fn) cycles. This means that calling
- // resume within the same tick will have no
- // effect.
- process.nextTick(updateReadableListening, this);
- }
-
- return res;
-};
-
-Readable.prototype.removeAllListeners = function (ev) {
- var res = Stream.prototype.removeAllListeners.apply(this, arguments);
-
- if (ev === 'readable' || ev === undefined) {
- // We need to check if there is someone still listening to
- // readable and reset the state. However this needs to happen
- // after readable has been emitted but before I/O (nextTick) to
- // support once('readable', fn) cycles. This means that calling
- // resume within the same tick will have no
- // effect.
- process.nextTick(updateReadableListening, this);
- }
-
- return res;
-};
-
-function updateReadableListening(self) {
- var state = self._readableState;
- state.readableListening = self.listenerCount('readable') > 0;
-
- if (state.resumeScheduled && !state.paused) {
- // flowing needs to be set to true now, otherwise
- // the upcoming resume will not flow.
- state.flowing = true; // crude way to check if we should resume
- } else if (self.listenerCount('data') > 0) {
- self.resume();
- }
-}
-
-function nReadingNextTick(self) {
- debug('readable nexttick read 0');
- self.read(0);
-} // pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-
-
-Readable.prototype.resume = function () {
- var state = this._readableState;
-
- if (!state.flowing) {
- debug('resume'); // we flow only if there is no one listening
- // for readable, but we still have to call
- // resume()
-
- state.flowing = !state.readableListening;
- resume(this, state);
- }
-
- state.paused = false;
- return this;
-};
-
-function resume(stream, state) {
- if (!state.resumeScheduled) {
- state.resumeScheduled = true;
- process.nextTick(resume_, stream, state);
- }
-}
-
-function resume_(stream, state) {
- debug('resume', state.reading);
-
- if (!state.reading) {
- stream.read(0);
- }
-
- state.resumeScheduled = false;
- stream.emit('resume');
- flow(stream);
- if (state.flowing && !state.reading) stream.read(0);
-}
-
-Readable.prototype.pause = function () {
- debug('call pause flowing=%j', this._readableState.flowing);
-
- if (this._readableState.flowing !== false) {
- debug('pause');
- this._readableState.flowing = false;
- this.emit('pause');
- }
-
- this._readableState.paused = true;
- return this;
-};
-
-function flow(stream) {
- var state = stream._readableState;
- debug('flow', state.flowing);
-
- while (state.flowing && stream.read() !== null) {
- ;
- }
-} // wrap an old-style stream as the async data source.
-// This is *not* part of the readable stream interface.
-// It is an ugly unfortunate mess of history.
-
-
-Readable.prototype.wrap = function (stream) {
- var _this = this;
-
- var state = this._readableState;
- var paused = false;
- stream.on('end', function () {
- debug('wrapped end');
-
- if (state.decoder && !state.ended) {
- var chunk = state.decoder.end();
- if (chunk && chunk.length) _this.push(chunk);
- }
-
- _this.push(null);
- });
- stream.on('data', function (chunk) {
- debug('wrapped data');
- if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
-
- if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
-
- var ret = _this.push(chunk);
-
- if (!ret) {
- paused = true;
- stream.pause();
- }
- }); // proxy all the other methods.
- // important when wrapping filters and duplexes.
-
- for (var i in stream) {
- if (this[i] === undefined && typeof stream[i] === 'function') {
- this[i] = function methodWrap(method) {
- return function methodWrapReturnFunction() {
- return stream[method].apply(stream, arguments);
- };
- }(i);
- }
- } // proxy certain important events.
-
-
- for (var n = 0; n < kProxyEvents.length; n++) {
- stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
- } // when we try to consume some more bytes, simply unpause the
- // underlying stream.
-
-
- this._read = function (n) {
- debug('wrapped _read', n);
-
- if (paused) {
- paused = false;
- stream.resume();
- }
- };
-
- return this;
-};
-
-if (typeof Symbol === 'function') {
- Readable.prototype[Symbol.asyncIterator] = function () {
- if (createReadableStreamAsyncIterator === undefined) {
- createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');
- }
-
- return createReadableStreamAsyncIterator(this);
- };
-}
-
-Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState.highWaterMark;
- }
-});
-Object.defineProperty(Readable.prototype, 'readableBuffer', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState && this._readableState.buffer;
- }
-});
-Object.defineProperty(Readable.prototype, 'readableFlowing', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState.flowing;
- },
- set: function set(state) {
- if (this._readableState) {
- this._readableState.flowing = state;
- }
- }
-}); // exposed for testing purposes only.
-
-Readable._fromList = fromList;
-Object.defineProperty(Readable.prototype, 'readableLength', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._readableState.length;
- }
-}); // Pluck off n bytes from an array of buffers.
-// Length is the combined lengths of all the buffers in the list.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-
-function fromList(n, state) {
- // nothing buffered
- if (state.length === 0) return null;
- var ret;
- if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
- // read it all, truncate the list
- if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
- state.buffer.clear();
- } else {
- // read part of list
- ret = state.buffer.consume(n, state.decoder);
- }
- return ret;
-}
-
-function endReadable(stream) {
- var state = stream._readableState;
- debug('endReadable', state.endEmitted);
-
- if (!state.endEmitted) {
- state.ended = true;
- process.nextTick(endReadableNT, state, stream);
- }
-}
-
-function endReadableNT(state, stream) {
- debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
-
- if (!state.endEmitted && state.length === 0) {
- state.endEmitted = true;
- stream.readable = false;
- stream.emit('end');
-
- if (state.autoDestroy) {
- // In case of duplex streams we need a way to detect
- // if the writable side is ready for autoDestroy as well
- var wState = stream._writableState;
-
- if (!wState || wState.autoDestroy && wState.finished) {
- stream.destroy();
- }
- }
- }
-}
-
-if (typeof Symbol === 'function') {
- Readable.from = function (iterable, opts) {
- if (from === undefined) {
- from = require('./internal/streams/from');
- }
-
- return from(Readable, iterable, opts);
- };
-}
-
-function indexOf(xs, x) {
- for (var i = 0, l = xs.length; i < l; i++) {
- if (xs[i] === x) return i;
- }
-
- return -1;
-}
\ No newline at end of file
+module.exports = require('./stream').Readable
diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js
index 41a738c4e9..98ea338248 100644
--- a/lib/_stream_transform.js
+++ b/lib/_stream_transform.js
@@ -1,201 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// a transform stream is a readable/writable stream where you do
-// something with the data. Sometimes it's called a "filter",
-// but that's not a great name for it, since that implies a thing where
-// some bits pass through, and others are simply ignored. (That would
-// be a valid example of a transform, of course.)
-//
-// While the output is causally related to the input, it's not a
-// necessarily symmetric or synchronous transformation. For example,
-// a zlib stream might take multiple plain-text writes(), and then
-// emit a single compressed chunk some time in the future.
-//
-// Here's how this works:
-//
-// The Transform stream has all the aspects of the readable and writable
-// stream classes. When you write(chunk), that calls _write(chunk,cb)
-// internally, and returns false if there's a lot of pending writes
-// buffered up. When you call read(), that calls _read(n) until
-// there's enough pending readable data buffered up.
-//
-// In a transform stream, the written data is placed in a buffer. When
-// _read(n) is called, it transforms the queued up data, calling the
-// buffered _write cb's as it consumes chunks. If consuming a single
-// written chunk would result in multiple output chunks, then the first
-// outputted bit calls the readcb, and subsequent chunks just go into
-// the read buffer, and will cause it to emit 'readable' if necessary.
-//
-// This way, back-pressure is actually determined by the reading side,
-// since _read has to be called to start processing a new chunk. However,
-// a pathological inflate type of transform can cause excessive buffering
-// here. For example, imagine a stream where every byte of input is
-// interpreted as an integer from 0-255, and then results in that many
-// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
-// 1kb of data being output. In this case, you could write a very small
-// amount of input, and end up with a very large amount of output. In
-// such a pathological inflating mechanism, there'd be no way to tell
-// the system to stop doing the transform. A single 4MB write could
-// cause the system to run out of memory.
-//
-// However, even in such a pathological case, only a single written chunk
-// would be consumed, and then the rest would wait (un-transformed) until
-// the results of the previous transformed chunk were consumed.
-'use strict';
+'use strict' // Keep this file as an alias for the full stream module.
-module.exports = Transform;
-
-var _require$codes = require('../errors').codes,
- ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
- ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
- ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
- ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
-
-var Duplex = require('./_stream_duplex');
-
-require('inherits')(Transform, Duplex);
-
-function afterTransform(er, data) {
- var ts = this._transformState;
- ts.transforming = false;
- var cb = ts.writecb;
-
- if (cb === null) {
- return this.emit('error', new ERR_MULTIPLE_CALLBACK());
- }
-
- ts.writechunk = null;
- ts.writecb = null;
- if (data != null) // single equals check for both `null` and `undefined`
- this.push(data);
- cb(er);
- var rs = this._readableState;
- rs.reading = false;
-
- if (rs.needReadable || rs.length < rs.highWaterMark) {
- this._read(rs.highWaterMark);
- }
-}
-
-function Transform(options) {
- if (!(this instanceof Transform)) return new Transform(options);
- Duplex.call(this, options);
- this._transformState = {
- afterTransform: afterTransform.bind(this),
- needTransform: false,
- transforming: false,
- writecb: null,
- writechunk: null,
- writeencoding: null
- }; // start out asking for a readable event once data is transformed.
-
- this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
- // that Readable wants before the first _read call, so unset the
- // sync guard flag.
-
- this._readableState.sync = false;
-
- if (options) {
- if (typeof options.transform === 'function') this._transform = options.transform;
- if (typeof options.flush === 'function') this._flush = options.flush;
- } // When the writable side finishes, then flush out anything remaining.
-
-
- this.on('prefinish', prefinish);
-}
-
-function prefinish() {
- var _this = this;
-
- if (typeof this._flush === 'function' && !this._readableState.destroyed) {
- this._flush(function (er, data) {
- done(_this, er, data);
- });
- } else {
- done(this, null, null);
- }
-}
-
-Transform.prototype.push = function (chunk, encoding) {
- this._transformState.needTransform = false;
- return Duplex.prototype.push.call(this, chunk, encoding);
-}; // This is the part where you do stuff!
-// override this function in implementation classes.
-// 'chunk' is an input chunk.
-//
-// Call `push(newChunk)` to pass along transformed output
-// to the readable side. You may call 'push' zero or more times.
-//
-// Call `cb(err)` when you are done with this chunk. If you pass
-// an error, then that'll put the hurt on the whole operation. If you
-// never call cb(), then you'll never get another chunk.
-
-
-Transform.prototype._transform = function (chunk, encoding, cb) {
- cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
-};
-
-Transform.prototype._write = function (chunk, encoding, cb) {
- var ts = this._transformState;
- ts.writecb = cb;
- ts.writechunk = chunk;
- ts.writeencoding = encoding;
-
- if (!ts.transforming) {
- var rs = this._readableState;
- if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
- }
-}; // Doesn't matter what the args are here.
-// _transform does all the work.
-// That we got here means that the readable side wants more data.
-
-
-Transform.prototype._read = function (n) {
- var ts = this._transformState;
-
- if (ts.writechunk !== null && !ts.transforming) {
- ts.transforming = true;
-
- this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
- } else {
- // mark that we need a transform, so that any data that comes in
- // will get processed, now that we've asked for it.
- ts.needTransform = true;
- }
-};
-
-Transform.prototype._destroy = function (err, cb) {
- Duplex.prototype._destroy.call(this, err, function (err2) {
- cb(err2);
- });
-};
-
-function done(stream, er, data) {
- if (er) return stream.emit('error', er);
- if (data != null) // single equals check for both `null` and `undefined`
- stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
- // if there's nothing in the write buffer, then that means
- // that nothing more will ever be provided
-
- if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
- if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
- return stream.push(null);
-}
\ No newline at end of file
+module.exports = require('./stream').Transform
diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js
index a2634d7c24..07204c4295 100644
--- a/lib/_stream_writable.js
+++ b/lib/_stream_writable.js
@@ -1,697 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-// A bit simpler than readable streams.
-// Implement an async ._write(chunk, encoding, cb), and it'll handle all
-// the drain event emission and buffering.
-'use strict';
+'use strict' // Keep this file as an alias for the full stream module.
-module.exports = Writable;
-/* */
-
-function WriteReq(chunk, encoding, cb) {
- this.chunk = chunk;
- this.encoding = encoding;
- this.callback = cb;
- this.next = null;
-} // It seems a linked list but it is not
-// there will be only 2 of these for each stream
-
-
-function CorkedRequest(state) {
- var _this = this;
-
- this.next = null;
- this.entry = null;
-
- this.finish = function () {
- onCorkedFinish(_this, state);
- };
-}
-/* */
-
-/**/
-
-
-var Duplex;
-/**/
-
-Writable.WritableState = WritableState;
-/**/
-
-var internalUtil = {
- deprecate: require('util-deprecate')
-};
-/**/
-
-/**/
-
-var Stream = require('./internal/streams/stream');
-/**/
-
-
-var Buffer = require('buffer').Buffer;
-
-var OurUint8Array = global.Uint8Array || function () {};
-
-function _uint8ArrayToBuffer(chunk) {
- return Buffer.from(chunk);
-}
-
-function _isUint8Array(obj) {
- return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-
-var destroyImpl = require('./internal/streams/destroy');
-
-var _require = require('./internal/streams/state'),
- getHighWaterMark = _require.getHighWaterMark;
-
-var _require$codes = require('../errors').codes,
- ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
- ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
- ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
- ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
- ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
- ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
- ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
- ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-
-require('inherits')(Writable, Stream);
-
-function nop() {}
-
-function WritableState(options, stream, isDuplex) {
- Duplex = Duplex || require('./_stream_duplex');
- options = options || {}; // Duplex streams are both readable and writable, but share
- // the same options object.
- // However, some cases require setting options to different
- // values for the readable and the writable sides of the duplex stream,
- // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
-
- if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
- // contains buffers or objects.
-
- this.objectMode = !!options.objectMode;
- if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
- // Note: 0 is a valid value, means that we always return false if
- // the entire buffer is not flushed immediately on write()
-
- this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
-
- this.finalCalled = false; // drain event flag.
-
- this.needDrain = false; // at the start of calling end()
-
- this.ending = false; // when end() has been called, and returned
-
- this.ended = false; // when 'finish' is emitted
-
- this.finished = false; // has it been destroyed
-
- this.destroyed = false; // should we decode strings into buffers before passing to _write?
- // this is here so that some node-core streams can optimize string
- // handling at a lower level.
-
- var noDecode = options.decodeStrings === false;
- this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
- // encoding is 'binary' so we have to make this configurable.
- // Everything else in the universe uses 'utf8', though.
-
- this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
- // of how much we're waiting to get pushed to some underlying
- // socket or file.
-
- this.length = 0; // a flag to see when we're in the middle of a write.
-
- this.writing = false; // when true all writes will be buffered until .uncork() call
-
- this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
- // or on a later tick. We set this to true at first, because any
- // actions that shouldn't happen until "later" should generally also
- // not happen before the first write call.
-
- this.sync = true; // a flag to know if we're processing previously buffered items, which
- // may call the _write() callback in the same tick, so that we don't
- // end up in an overlapped onwrite situation.
-
- this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
-
- this.onwrite = function (er) {
- onwrite(stream, er);
- }; // the callback that the user supplies to write(chunk,encoding,cb)
-
-
- this.writecb = null; // the amount that is being written when _write is called.
-
- this.writelen = 0;
- this.bufferedRequest = null;
- this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
- // this must be 0 before 'finish' can be emitted
-
- this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
- // This is relevant for synchronous Transform streams
-
- this.prefinished = false; // True if the error was already emitted and should not be thrown again
-
- this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
-
- this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
-
- this.autoDestroy = !!options.autoDestroy; // count buffered requests
-
- this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
- // one allocated and free to use, and we maintain at most two
-
- this.corkedRequestsFree = new CorkedRequest(this);
-}
-
-WritableState.prototype.getBuffer = function getBuffer() {
- var current = this.bufferedRequest;
- var out = [];
-
- while (current) {
- out.push(current);
- current = current.next;
- }
-
- return out;
-};
-
-(function () {
- try {
- Object.defineProperty(WritableState.prototype, 'buffer', {
- get: internalUtil.deprecate(function writableStateBufferGetter() {
- return this.getBuffer();
- }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
- });
- } catch (_) {}
-})(); // Test _writableState for inheritance to account for Duplex streams,
-// whose prototype chain only points to Readable.
-
-
-var realHasInstance;
-
-if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
- realHasInstance = Function.prototype[Symbol.hasInstance];
- Object.defineProperty(Writable, Symbol.hasInstance, {
- value: function value(object) {
- if (realHasInstance.call(this, object)) return true;
- if (this !== Writable) return false;
- return object && object._writableState instanceof WritableState;
- }
- });
-} else {
- realHasInstance = function realHasInstance(object) {
- return object instanceof this;
- };
-}
-
-function Writable(options) {
- Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
- // `realHasInstance` is necessary because using plain `instanceof`
- // would return false, as no `_writableState` property is attached.
- // Trying to use the custom `instanceof` for Writable here will also break the
- // Node.js LazyTransform implementation, which has a non-trivial getter for
- // `_writableState` that would lead to infinite recursion.
- // Checking for a Stream.Duplex instance is faster here instead of inside
- // the WritableState constructor, at least with V8 6.5
-
- var isDuplex = this instanceof Duplex;
- if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
- this._writableState = new WritableState(options, this, isDuplex); // legacy.
-
- this.writable = true;
-
- if (options) {
- if (typeof options.write === 'function') this._write = options.write;
- if (typeof options.writev === 'function') this._writev = options.writev;
- if (typeof options.destroy === 'function') this._destroy = options.destroy;
- if (typeof options.final === 'function') this._final = options.final;
- }
-
- Stream.call(this);
-} // Otherwise people can pipe Writable streams, which is just wrong.
-
-
-Writable.prototype.pipe = function () {
- errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
-};
-
-function writeAfterEnd(stream, cb) {
- var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
-
- errorOrDestroy(stream, er);
- process.nextTick(cb, er);
-} // Checks that a user-supplied chunk is valid, especially for the particular
-// mode the stream is in. Currently this means that `null` is never accepted
-// and undefined/non-string values are only allowed in object mode.
-
-
-function validChunk(stream, state, chunk, cb) {
- var er;
-
- if (chunk === null) {
- er = new ERR_STREAM_NULL_VALUES();
- } else if (typeof chunk !== 'string' && !state.objectMode) {
- er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
- }
-
- if (er) {
- errorOrDestroy(stream, er);
- process.nextTick(cb, er);
- return false;
- }
-
- return true;
-}
-
-Writable.prototype.write = function (chunk, encoding, cb) {
- var state = this._writableState;
- var ret = false;
-
- var isBuf = !state.objectMode && _isUint8Array(chunk);
-
- if (isBuf && !Buffer.isBuffer(chunk)) {
- chunk = _uint8ArrayToBuffer(chunk);
- }
-
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = null;
- }
-
- if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
- if (typeof cb !== 'function') cb = nop;
- if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
- state.pendingcb++;
- ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
- }
- return ret;
-};
-
-Writable.prototype.cork = function () {
- this._writableState.corked++;
-};
-
-Writable.prototype.uncork = function () {
- var state = this._writableState;
-
- if (state.corked) {
- state.corked--;
- if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
- }
-};
-
-Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
- // node::ParseEncoding() requires lower case.
- if (typeof encoding === 'string') encoding = encoding.toLowerCase();
- if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
- this._writableState.defaultEncoding = encoding;
- return this;
-};
-
-Object.defineProperty(Writable.prototype, 'writableBuffer', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState && this._writableState.getBuffer();
- }
-});
-
-function decodeChunk(state, chunk, encoding) {
- if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
- chunk = Buffer.from(chunk, encoding);
- }
-
- return chunk;
-}
-
-Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.highWaterMark;
- }
-}); // if we're already writing something, then just put this
-// in the queue, and wait our turn. Otherwise, call _write
-// If we return false, then we need a drain event, so set that flag.
-
-function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
- if (!isBuf) {
- var newChunk = decodeChunk(state, chunk, encoding);
-
- if (chunk !== newChunk) {
- isBuf = true;
- encoding = 'buffer';
- chunk = newChunk;
- }
- }
-
- var len = state.objectMode ? 1 : chunk.length;
- state.length += len;
- var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
-
- if (!ret) state.needDrain = true;
-
- if (state.writing || state.corked) {
- var last = state.lastBufferedRequest;
- state.lastBufferedRequest = {
- chunk: chunk,
- encoding: encoding,
- isBuf: isBuf,
- callback: cb,
- next: null
- };
-
- if (last) {
- last.next = state.lastBufferedRequest;
- } else {
- state.bufferedRequest = state.lastBufferedRequest;
- }
-
- state.bufferedRequestCount += 1;
- } else {
- doWrite(stream, state, false, len, chunk, encoding, cb);
- }
-
- return ret;
-}
-
-function doWrite(stream, state, writev, len, chunk, encoding, cb) {
- state.writelen = len;
- state.writecb = cb;
- state.writing = true;
- state.sync = true;
- if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
- state.sync = false;
-}
-
-function onwriteError(stream, state, sync, er, cb) {
- --state.pendingcb;
-
- if (sync) {
- // defer the callback if we are being called synchronously
- // to avoid piling up things on the stack
- process.nextTick(cb, er); // this can emit finish, and it will always happen
- // after error
-
- process.nextTick(finishMaybe, stream, state);
- stream._writableState.errorEmitted = true;
- errorOrDestroy(stream, er);
- } else {
- // the caller expect this to happen before if
- // it is async
- cb(er);
- stream._writableState.errorEmitted = true;
- errorOrDestroy(stream, er); // this can emit finish, but finish must
- // always follow error
-
- finishMaybe(stream, state);
- }
-}
-
-function onwriteStateUpdate(state) {
- state.writing = false;
- state.writecb = null;
- state.length -= state.writelen;
- state.writelen = 0;
-}
-
-function onwrite(stream, er) {
- var state = stream._writableState;
- var sync = state.sync;
- var cb = state.writecb;
- if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
- onwriteStateUpdate(state);
- if (er) onwriteError(stream, state, sync, er, cb);else {
- // Check if we're actually ready to finish, but don't emit yet
- var finished = needFinish(state) || stream.destroyed;
-
- if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
- clearBuffer(stream, state);
- }
-
- if (sync) {
- process.nextTick(afterWrite, stream, state, finished, cb);
- } else {
- afterWrite(stream, state, finished, cb);
- }
- }
-}
-
-function afterWrite(stream, state, finished, cb) {
- if (!finished) onwriteDrain(stream, state);
- state.pendingcb--;
- cb();
- finishMaybe(stream, state);
-} // Must force callback to be called on nextTick, so that we don't
-// emit 'drain' before the write() consumer gets the 'false' return
-// value, and has a chance to attach a 'drain' listener.
-
-
-function onwriteDrain(stream, state) {
- if (state.length === 0 && state.needDrain) {
- state.needDrain = false;
- stream.emit('drain');
- }
-} // if there's something in the buffer waiting, then process it
-
-
-function clearBuffer(stream, state) {
- state.bufferProcessing = true;
- var entry = state.bufferedRequest;
-
- if (stream._writev && entry && entry.next) {
- // Fast case, write everything using _writev()
- var l = state.bufferedRequestCount;
- var buffer = new Array(l);
- var holder = state.corkedRequestsFree;
- holder.entry = entry;
- var count = 0;
- var allBuffers = true;
-
- while (entry) {
- buffer[count] = entry;
- if (!entry.isBuf) allBuffers = false;
- entry = entry.next;
- count += 1;
- }
-
- buffer.allBuffers = allBuffers;
- doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
- // as the hot path ends with doWrite
-
- state.pendingcb++;
- state.lastBufferedRequest = null;
-
- if (holder.next) {
- state.corkedRequestsFree = holder.next;
- holder.next = null;
- } else {
- state.corkedRequestsFree = new CorkedRequest(state);
- }
-
- state.bufferedRequestCount = 0;
- } else {
- // Slow case, write chunks one-by-one
- while (entry) {
- var chunk = entry.chunk;
- var encoding = entry.encoding;
- var cb = entry.callback;
- var len = state.objectMode ? 1 : chunk.length;
- doWrite(stream, state, false, len, chunk, encoding, cb);
- entry = entry.next;
- state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
- // it means that we need to wait until it does.
- // also, that means that the chunk and cb are currently
- // being processed, so move the buffer counter past them.
-
- if (state.writing) {
- break;
- }
- }
-
- if (entry === null) state.lastBufferedRequest = null;
- }
-
- state.bufferedRequest = entry;
- state.bufferProcessing = false;
-}
-
-Writable.prototype._write = function (chunk, encoding, cb) {
- cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
-};
-
-Writable.prototype._writev = null;
-
-Writable.prototype.end = function (chunk, encoding, cb) {
- var state = this._writableState;
-
- if (typeof chunk === 'function') {
- cb = chunk;
- chunk = null;
- encoding = null;
- } else if (typeof encoding === 'function') {
- cb = encoding;
- encoding = null;
- }
-
- if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
-
- if (state.corked) {
- state.corked = 1;
- this.uncork();
- } // ignore unnecessary end() calls.
-
-
- if (!state.ending) endWritable(this, state, cb);
- return this;
-};
-
-Object.defineProperty(Writable.prototype, 'writableLength', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- return this._writableState.length;
- }
-});
-
-function needFinish(state) {
- return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
-}
-
-function callFinal(stream, state) {
- stream._final(function (err) {
- state.pendingcb--;
-
- if (err) {
- errorOrDestroy(stream, err);
- }
-
- state.prefinished = true;
- stream.emit('prefinish');
- finishMaybe(stream, state);
- });
-}
-
-function prefinish(stream, state) {
- if (!state.prefinished && !state.finalCalled) {
- if (typeof stream._final === 'function' && !state.destroyed) {
- state.pendingcb++;
- state.finalCalled = true;
- process.nextTick(callFinal, stream, state);
- } else {
- state.prefinished = true;
- stream.emit('prefinish');
- }
- }
-}
-
-function finishMaybe(stream, state) {
- var need = needFinish(state);
-
- if (need) {
- prefinish(stream, state);
-
- if (state.pendingcb === 0) {
- state.finished = true;
- stream.emit('finish');
-
- if (state.autoDestroy) {
- // In case of duplex streams we need a way to detect
- // if the readable side is ready for autoDestroy as well
- var rState = stream._readableState;
-
- if (!rState || rState.autoDestroy && rState.endEmitted) {
- stream.destroy();
- }
- }
- }
- }
-
- return need;
-}
-
-function endWritable(stream, state, cb) {
- state.ending = true;
- finishMaybe(stream, state);
-
- if (cb) {
- if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
- }
-
- state.ended = true;
- stream.writable = false;
-}
-
-function onCorkedFinish(corkReq, state, err) {
- var entry = corkReq.entry;
- corkReq.entry = null;
-
- while (entry) {
- var cb = entry.callback;
- state.pendingcb--;
- cb(err);
- entry = entry.next;
- } // reuse the free corkReq.
-
-
- state.corkedRequestsFree.next = corkReq;
-}
-
-Object.defineProperty(Writable.prototype, 'destroyed', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function get() {
- if (this._writableState === undefined) {
- return false;
- }
-
- return this._writableState.destroyed;
- },
- set: function set(value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (!this._writableState) {
- return;
- } // backward compatibility, the user is explicitly
- // managing destroyed
-
-
- this._writableState.destroyed = value;
- }
-});
-Writable.prototype.destroy = destroyImpl.destroy;
-Writable.prototype._undestroy = destroyImpl.undestroy;
-
-Writable.prototype._destroy = function (err, cb) {
- cb(err);
-};
\ No newline at end of file
+module.exports = require('./stream').Writable
diff --git a/lib/internal/streams/add-abort-signal.js b/lib/internal/streams/add-abort-signal.js
new file mode 100644
index 0000000000..8d5a840f70
--- /dev/null
+++ b/lib/internal/streams/add-abort-signal.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const { AbortError, codes } = require('../../ours/errors')
+
+const eos = require('./end-of-stream')
+
+const { ERR_INVALID_ARG_TYPE } = codes // This method is inlined here for readable-stream
+// It also does not allow for signal to not exist on the stream
+// https://github.com/nodejs/node/pull/36061#discussion_r533718029
+
+const validateAbortSignal = (signal, name) => {
+ if (typeof signal !== 'object' || !('aborted' in signal)) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
+ }
+}
+
+function isNodeStream(obj) {
+ return !!(obj && typeof obj.pipe === 'function')
+}
+
+module.exports.addAbortSignal = function addAbortSignal(signal, stream) {
+ validateAbortSignal(signal, 'signal')
+
+ if (!isNodeStream(stream)) {
+ throw new ERR_INVALID_ARG_TYPE('stream', 'stream.Stream', stream)
+ }
+
+ return module.exports.addAbortSignalNoValidate(signal, stream)
+}
+
+module.exports.addAbortSignalNoValidate = function (signal, stream) {
+ if (typeof signal !== 'object' || !('aborted' in signal)) {
+ return stream
+ }
+
+ const onAbort = () => {
+ stream.destroy(
+ new AbortError(undefined, {
+ cause: signal.reason
+ })
+ )
+ }
+
+ if (signal.aborted) {
+ onAbort()
+ } else {
+ signal.addEventListener('abort', onAbort)
+ eos(stream, () => signal.removeEventListener('abort', onAbort))
+ }
+
+ return stream
+}
diff --git a/lib/internal/streams/async_iterator.js b/lib/internal/streams/async_iterator.js
deleted file mode 100644
index 9fb615a2f3..0000000000
--- a/lib/internal/streams/async_iterator.js
+++ /dev/null
@@ -1,207 +0,0 @@
-'use strict';
-
-var _Object$setPrototypeO;
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-var finished = require('./end-of-stream');
-
-var kLastResolve = Symbol('lastResolve');
-var kLastReject = Symbol('lastReject');
-var kError = Symbol('error');
-var kEnded = Symbol('ended');
-var kLastPromise = Symbol('lastPromise');
-var kHandlePromise = Symbol('handlePromise');
-var kStream = Symbol('stream');
-
-function createIterResult(value, done) {
- return {
- value: value,
- done: done
- };
-}
-
-function readAndResolve(iter) {
- var resolve = iter[kLastResolve];
-
- if (resolve !== null) {
- var data = iter[kStream].read(); // we defer if data is null
- // we can be expecting either 'end' or
- // 'error'
-
- if (data !== null) {
- iter[kLastPromise] = null;
- iter[kLastResolve] = null;
- iter[kLastReject] = null;
- resolve(createIterResult(data, false));
- }
- }
-}
-
-function onReadable(iter) {
- // we wait for the next tick, because it might
- // emit an error with process.nextTick
- process.nextTick(readAndResolve, iter);
-}
-
-function wrapForNext(lastPromise, iter) {
- return function (resolve, reject) {
- lastPromise.then(function () {
- if (iter[kEnded]) {
- resolve(createIterResult(undefined, true));
- return;
- }
-
- iter[kHandlePromise](resolve, reject);
- }, reject);
- };
-}
-
-var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
-var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
- get stream() {
- return this[kStream];
- },
-
- next: function next() {
- var _this = this;
-
- // if we have detected an error in the meanwhile
- // reject straight away
- var error = this[kError];
-
- if (error !== null) {
- return Promise.reject(error);
- }
-
- if (this[kEnded]) {
- return Promise.resolve(createIterResult(undefined, true));
- }
-
- if (this[kStream].destroyed) {
- // We need to defer via nextTick because if .destroy(err) is
- // called, the error will be emitted via nextTick, and
- // we cannot guarantee that there is no error lingering around
- // waiting to be emitted.
- return new Promise(function (resolve, reject) {
- process.nextTick(function () {
- if (_this[kError]) {
- reject(_this[kError]);
- } else {
- resolve(createIterResult(undefined, true));
- }
- });
- });
- } // if we have multiple next() calls
- // we will wait for the previous Promise to finish
- // this logic is optimized to support for await loops,
- // where next() is only called once at a time
-
-
- var lastPromise = this[kLastPromise];
- var promise;
-
- if (lastPromise) {
- promise = new Promise(wrapForNext(lastPromise, this));
- } else {
- // fast path needed to support multiple this.push()
- // without triggering the next() queue
- var data = this[kStream].read();
-
- if (data !== null) {
- return Promise.resolve(createIterResult(data, false));
- }
-
- promise = new Promise(this[kHandlePromise]);
- }
-
- this[kLastPromise] = promise;
- return promise;
- }
-}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
- return this;
-}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
- var _this2 = this;
-
- // destroy(err, cb) is a private API
- // we can guarantee we have that here, because we control the
- // Readable class this is attached to
- return new Promise(function (resolve, reject) {
- _this2[kStream].destroy(null, function (err) {
- if (err) {
- reject(err);
- return;
- }
-
- resolve(createIterResult(undefined, true));
- });
- });
-}), _Object$setPrototypeO), AsyncIteratorPrototype);
-
-var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
- var _Object$create;
-
- var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
- value: stream,
- writable: true
- }), _defineProperty(_Object$create, kLastResolve, {
- value: null,
- writable: true
- }), _defineProperty(_Object$create, kLastReject, {
- value: null,
- writable: true
- }), _defineProperty(_Object$create, kError, {
- value: null,
- writable: true
- }), _defineProperty(_Object$create, kEnded, {
- value: stream._readableState.endEmitted,
- writable: true
- }), _defineProperty(_Object$create, kHandlePromise, {
- value: function value(resolve, reject) {
- var data = iterator[kStream].read();
-
- if (data) {
- iterator[kLastPromise] = null;
- iterator[kLastResolve] = null;
- iterator[kLastReject] = null;
- resolve(createIterResult(data, false));
- } else {
- iterator[kLastResolve] = resolve;
- iterator[kLastReject] = reject;
- }
- },
- writable: true
- }), _Object$create));
- iterator[kLastPromise] = null;
- finished(stream, function (err) {
- if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
- var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
- // returned by next() and store the error
-
- if (reject !== null) {
- iterator[kLastPromise] = null;
- iterator[kLastResolve] = null;
- iterator[kLastReject] = null;
- reject(err);
- }
-
- iterator[kError] = err;
- return;
- }
-
- var resolve = iterator[kLastResolve];
-
- if (resolve !== null) {
- iterator[kLastPromise] = null;
- iterator[kLastResolve] = null;
- iterator[kLastReject] = null;
- resolve(createIterResult(undefined, true));
- }
-
- iterator[kEnded] = true;
- });
- stream.on('readable', onReadable.bind(null, iterator));
- return iterator;
-};
-
-module.exports = createReadableStreamAsyncIterator;
\ No newline at end of file
diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js
index cdea425f19..6a37442bae 100644
--- a/lib/internal/streams/buffer_list.js
+++ b/lib/internal/streams/buffer_list.js
@@ -1,210 +1,178 @@
-'use strict';
+'use strict'
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
+const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials')
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+const { inspect } = require('../../ours/util')
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+module.exports = class BufferList {
+ constructor() {
+ this.head = null
+ this.tail = null
+ this.length = 0
+ }
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+ push(v) {
+ const entry = {
+ data: v,
+ next: null
+ }
+ if (this.length > 0) this.tail.next = entry
+ else this.head = entry
+ this.tail = entry
+ ++this.length
+ }
-var _require = require('buffer'),
- Buffer = _require.Buffer;
+ unshift(v) {
+ const entry = {
+ data: v,
+ next: this.head
+ }
+ if (this.length === 0) this.tail = entry
+ this.head = entry
+ ++this.length
+ }
-var _require2 = require('util'),
- inspect = _require2.inspect;
+ shift() {
+ if (this.length === 0) return
+ const ret = this.head.data
+ if (this.length === 1) this.head = this.tail = null
+ else this.head = this.head.next
+ --this.length
+ return ret
+ }
-var custom = inspect && inspect.custom || 'inspect';
+ clear() {
+ this.head = this.tail = null
+ this.length = 0
+ }
-function copyBuffer(src, target, offset) {
- Buffer.prototype.copy.call(src, target, offset);
-}
+ join(s) {
+ if (this.length === 0) return ''
+ let p = this.head
+ let ret = '' + p.data
-module.exports =
-/*#__PURE__*/
-function () {
- function BufferList() {
- _classCallCheck(this, BufferList);
+ while ((p = p.next) !== null) ret += s + p.data
- this.head = null;
- this.tail = null;
- this.length = 0;
+ return ret
}
- _createClass(BufferList, [{
- key: "push",
- value: function push(v) {
- var entry = {
- data: v,
- next: null
- };
- if (this.length > 0) this.tail.next = entry;else this.head = entry;
- this.tail = entry;
- ++this.length;
- }
- }, {
- key: "unshift",
- value: function unshift(v) {
- var entry = {
- data: v,
- next: this.head
- };
- if (this.length === 0) this.tail = entry;
- this.head = entry;
- ++this.length;
- }
- }, {
- key: "shift",
- value: function shift() {
- if (this.length === 0) return;
- var ret = this.head.data;
- if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
- --this.length;
- return ret;
- }
- }, {
- key: "clear",
- value: function clear() {
- this.head = this.tail = null;
- this.length = 0;
+ concat(n) {
+ if (this.length === 0) return Buffer.alloc(0)
+ const ret = Buffer.allocUnsafe(n >>> 0)
+ let p = this.head
+ let i = 0
+
+ while (p) {
+ TypedArrayPrototypeSet(ret, p.data, i)
+ i += p.data.length
+ p = p.next
}
- }, {
- key: "join",
- value: function join(s) {
- if (this.length === 0) return '';
- var p = this.head;
- var ret = '' + p.data;
-
- while (p = p.next) {
- ret += s + p.data;
- }
- return ret;
+ return ret
+ } // Consumes a specified amount of bytes or characters from the buffered data.
+
+ consume(n, hasStrings) {
+ const data = this.head.data
+
+ if (n < data.length) {
+ // `slice` is the same for buffers and strings.
+ const slice = data.slice(0, n)
+ this.head.data = data.slice(n)
+ return slice
}
- }, {
- key: "concat",
- value: function concat(n) {
- if (this.length === 0) return Buffer.alloc(0);
- var ret = Buffer.allocUnsafe(n >>> 0);
- var p = this.head;
- var i = 0;
-
- while (p) {
- copyBuffer(p.data, ret, i);
- i += p.data.length;
- p = p.next;
- }
- return ret;
- } // Consumes a specified amount of bytes or characters from the buffered data.
-
- }, {
- key: "consume",
- value: function consume(n, hasStrings) {
- var ret;
-
- if (n < this.head.data.length) {
- // `slice` is the same for buffers and strings.
- ret = this.head.data.slice(0, n);
- this.head.data = this.head.data.slice(n);
- } else if (n === this.head.data.length) {
- // First chunk is a perfect match.
- ret = this.shift();
- } else {
- // Result spans more than one buffer.
- ret = hasStrings ? this._getString(n) : this._getBuffer(n);
- }
+ if (n === data.length) {
+ // First chunk is a perfect match.
+ return this.shift()
+ } // Result spans more than one buffer.
+
+ return hasStrings ? this._getString(n) : this._getBuffer(n)
+ }
+
+ first() {
+ return this.head.data
+ }
- return ret;
+ *[SymbolIterator]() {
+ for (let p = this.head; p; p = p.next) {
+ yield p.data
}
- }, {
- key: "first",
- value: function first() {
- return this.head.data;
- } // Consumes a specified amount of characters from the buffered data.
-
- }, {
- key: "_getString",
- value: function _getString(n) {
- var p = this.head;
- var c = 1;
- var ret = p.data;
- n -= ret.length;
-
- while (p = p.next) {
- var str = p.data;
- var nb = n > str.length ? str.length : n;
- if (nb === str.length) ret += str;else ret += str.slice(0, n);
- n -= nb;
-
- if (n === 0) {
- if (nb === str.length) {
- ++c;
- if (p.next) this.head = p.next;else this.head = this.tail = null;
- } else {
- this.head = p;
- p.data = str.slice(nb);
- }
-
- break;
+ } // Consumes a specified amount of characters from the buffered data.
+
+ _getString(n) {
+ let ret = ''
+ let p = this.head
+ let c = 0
+
+ do {
+ const str = p.data
+
+ if (n > str.length) {
+ ret += str
+ n -= str.length
+ } else {
+ if (n === str.length) {
+ ret += str
+ ++c
+ if (p.next) this.head = p.next
+ else this.head = this.tail = null
+ } else {
+ ret += StringPrototypeSlice(str, 0, n)
+ this.head = p
+ p.data = StringPrototypeSlice(str, n)
}
- ++c;
+ break
}
- this.length -= c;
- return ret;
- } // Consumes a specified amount of bytes from the buffered data.
-
- }, {
- key: "_getBuffer",
- value: function _getBuffer(n) {
- var ret = Buffer.allocUnsafe(n);
- var p = this.head;
- var c = 1;
- p.data.copy(ret);
- n -= p.data.length;
-
- while (p = p.next) {
- var buf = p.data;
- var nb = n > buf.length ? buf.length : n;
- buf.copy(ret, ret.length - n, 0, nb);
- n -= nb;
-
- if (n === 0) {
- if (nb === buf.length) {
- ++c;
- if (p.next) this.head = p.next;else this.head = this.tail = null;
- } else {
- this.head = p;
- p.data = buf.slice(nb);
- }
-
- break;
+ ++c
+ } while ((p = p.next) !== null)
+
+ this.length -= c
+ return ret
+ } // Consumes a specified amount of bytes from the buffered data.
+
+ _getBuffer(n) {
+ const ret = Buffer.allocUnsafe(n)
+ const retLen = n
+ let p = this.head
+ let c = 0
+
+ do {
+ const buf = p.data
+
+ if (n > buf.length) {
+ TypedArrayPrototypeSet(ret, buf, retLen - n)
+ n -= buf.length
+ } else {
+ if (n === buf.length) {
+ TypedArrayPrototypeSet(ret, buf, retLen - n)
+ ++c
+ if (p.next) this.head = p.next
+ else this.head = this.tail = null
+ } else {
+ TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n)
+ this.head = p
+ p.data = buf.slice(n)
}
- ++c;
+ break
}
- this.length -= c;
- return ret;
- } // Make sure the linked list only shows the minimal necessary information.
-
- }, {
- key: custom,
- value: function value(_, options) {
- return inspect(this, _objectSpread({}, options, {
- // Only inspect one level.
- depth: 0,
- // It should not recurse.
- customInspect: false
- }));
- }
- }]);
-
- return BufferList;
-}();
\ No newline at end of file
+ ++c
+ } while ((p = p.next) !== null)
+
+ this.length -= c
+ return ret
+ } // Make sure the linked list only shows the minimal necessary information.
+
+ [Symbol.for('nodejs.util.inspect.custom')](_, options) {
+ return inspect(this, {
+ ...options,
+ // Only inspect one level.
+ depth: 0,
+ // It should not recurse.
+ customInspect: false
+ })
+ }
+}
diff --git a/lib/internal/streams/compose.js b/lib/internal/streams/compose.js
new file mode 100644
index 0000000000..0a2e810a3e
--- /dev/null
+++ b/lib/internal/streams/compose.js
@@ -0,0 +1,161 @@
+'use strict'
+
+const { pipeline } = require('./pipeline')
+
+const Duplex = require('./duplex')
+
+const { destroyer } = require('./destroy')
+
+const { isNodeStream, isReadable, isWritable } = require('./utils')
+
+const {
+ AbortError,
+ codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }
+} = require('../../ours/errors')
+
+module.exports = function compose(...streams) {
+ if (streams.length === 0) {
+ throw new ERR_MISSING_ARGS('streams')
+ }
+
+ if (streams.length === 1) {
+ return Duplex.from(streams[0])
+ }
+
+ const orgStreams = [...streams]
+
+ if (typeof streams[0] === 'function') {
+ streams[0] = Duplex.from(streams[0])
+ }
+
+ if (typeof streams[streams.length - 1] === 'function') {
+ const idx = streams.length - 1
+ streams[idx] = Duplex.from(streams[idx])
+ }
+
+ for (let n = 0; n < streams.length; ++n) {
+ if (!isNodeStream(streams[n])) {
+ // TODO(ronag): Add checks for non streams.
+ continue
+ }
+
+ if (n < streams.length - 1 && !isReadable(streams[n])) {
+ throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable')
+ }
+
+ if (n > 0 && !isWritable(streams[n])) {
+ throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable')
+ }
+ }
+
+ let ondrain
+ let onfinish
+ let onreadable
+ let onclose
+ let d
+
+ function onfinished(err) {
+ const cb = onclose
+ onclose = null
+
+ if (cb) {
+ cb(err)
+ } else if (err) {
+ d.destroy(err)
+ } else if (!readable && !writable) {
+ d.destroy()
+ }
+ }
+
+ const head = streams[0]
+ const tail = pipeline(streams, onfinished)
+ const writable = !!isWritable(head)
+ const readable = !!isReadable(tail) // TODO(ronag): Avoid double buffering.
+ // Implement Writable/Readable/Duplex traits.
+ // See, https://github.com/nodejs/node/pull/33515.
+
+ d = new Duplex({
+ // TODO (ronag): highWaterMark?
+ writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode),
+ readableObjectMode: !!(tail !== null && tail !== undefined && tail.writableObjectMode),
+ writable,
+ readable
+ })
+
+ if (writable) {
+ d._write = function (chunk, encoding, callback) {
+ if (head.write(chunk, encoding)) {
+ callback()
+ } else {
+ ondrain = callback
+ }
+ }
+
+ d._final = function (callback) {
+ head.end()
+ onfinish = callback
+ }
+
+ head.on('drain', function () {
+ if (ondrain) {
+ const cb = ondrain
+ ondrain = null
+ cb()
+ }
+ })
+ tail.on('finish', function () {
+ if (onfinish) {
+ const cb = onfinish
+ onfinish = null
+ cb()
+ }
+ })
+ }
+
+ if (readable) {
+ tail.on('readable', function () {
+ if (onreadable) {
+ const cb = onreadable
+ onreadable = null
+ cb()
+ }
+ })
+ tail.on('end', function () {
+ d.push(null)
+ })
+
+ d._read = function () {
+ while (true) {
+ const buf = tail.read()
+
+ if (buf === null) {
+ onreadable = d._read
+ return
+ }
+
+ if (!d.push(buf)) {
+ return
+ }
+ }
+ }
+ }
+
+ d._destroy = function (err, callback) {
+ if (!err && onclose !== null) {
+ err = new AbortError()
+ }
+
+ onreadable = null
+ ondrain = null
+ onfinish = null
+
+ if (onclose === null) {
+ callback(err)
+ } else {
+ onclose = callback
+ destroyer(tail, err)
+ }
+ }
+
+ return d
+}
diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js
index 3268a16f3b..7eeda13615 100644
--- a/lib/internal/streams/destroy.js
+++ b/lib/internal/streams/destroy.js
@@ -1,105 +1,332 @@
-'use strict'; // undocumented cb() API, needed for core, not for public API
+'use strict'
+
+const {
+ aggregateTwoErrors,
+ codes: { ERR_MULTIPLE_CALLBACK },
+ AbortError
+} = require('../../ours/errors')
+
+const { Symbol } = require('../../ours/primordials')
+
+const { kDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils')
+
+const kDestroy = Symbol('kDestroy')
+const kConstruct = Symbol('kConstruct')
+
+function checkError(err, w, r) {
+ if (err) {
+ // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
+ err.stack // eslint-disable-line no-unused-expressions
+
+ if (w && !w.errored) {
+ w.errored = err
+ }
+
+ if (r && !r.errored) {
+ r.errored = err
+ }
+ }
+} // Backwards compat. cb() is undocumented and unused in core but
+// unfortunately might be used by modules.
function destroy(err, cb) {
- var _this = this;
+ const r = this._readableState
+ const w = this._writableState // With duplex streams we use the writable side for state.
- var readableDestroyed = this._readableState && this._readableState.destroyed;
- var writableDestroyed = this._writableState && this._writableState.destroyed;
+ const s = w || r
- if (readableDestroyed || writableDestroyed) {
- if (cb) {
- cb(err);
- } else if (err) {
- if (!this._writableState) {
- process.nextTick(emitErrorNT, this, err);
- } else if (!this._writableState.errorEmitted) {
- this._writableState.errorEmitted = true;
- process.nextTick(emitErrorNT, this, err);
- }
+ if ((w && w.destroyed) || (r && r.destroyed)) {
+ if (typeof cb === 'function') {
+ cb()
}
- return this;
- } // we set destroyed to true before firing error callbacks in order
+ return this
+ } // We set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
+ checkError(err, w, r)
- if (this._readableState) {
- this._readableState.destroyed = true;
- } // if this is a duplex stream mark the writable part as destroyed as well
+ if (w) {
+ w.destroyed = true
+ }
+ if (r) {
+ r.destroyed = true
+ } // If still constructing then defer calling _destroy.
- if (this._writableState) {
- this._writableState.destroyed = true;
+ if (!s.constructed) {
+ this.once(kDestroy, function (er) {
+ _destroy(this, aggregateTwoErrors(er, err), cb)
+ })
+ } else {
+ _destroy(this, err, cb)
}
- this._destroy(err || null, function (err) {
- if (!cb && err) {
- if (!_this._writableState) {
- process.nextTick(emitErrorAndCloseNT, _this, err);
- } else if (!_this._writableState.errorEmitted) {
- _this._writableState.errorEmitted = true;
- process.nextTick(emitErrorAndCloseNT, _this, err);
- } else {
- process.nextTick(emitCloseNT, _this);
- }
- } else if (cb) {
- process.nextTick(emitCloseNT, _this);
- cb(err);
+ return this
+}
+
+function _destroy(self, err, cb) {
+ let called = false
+
+ function onDestroy(err) {
+ if (called) {
+ return
+ }
+
+ called = true
+ const r = self._readableState
+ const w = self._writableState
+ checkError(err, w, r)
+
+ if (w) {
+ w.closed = true
+ }
+
+ if (r) {
+ r.closed = true
+ }
+
+ if (typeof cb === 'function') {
+ cb(err)
+ }
+
+ if (err) {
+ process.nextTick(emitErrorCloseNT, self, err)
} else {
- process.nextTick(emitCloseNT, _this);
+ process.nextTick(emitCloseNT, self)
}
- });
+ }
- return this;
+ try {
+ self._destroy(err || null, onDestroy)
+ } catch (err) {
+ onDestroy(err)
+ }
}
-function emitErrorAndCloseNT(self, err) {
- emitErrorNT(self, err);
- emitCloseNT(self);
+function emitErrorCloseNT(self, err) {
+ emitErrorNT(self, err)
+ emitCloseNT(self)
}
function emitCloseNT(self) {
- if (self._writableState && !self._writableState.emitClose) return;
- if (self._readableState && !self._readableState.emitClose) return;
- self.emit('close');
-}
+ const r = self._readableState
+ const w = self._writableState
-function undestroy() {
- if (this._readableState) {
- this._readableState.destroyed = false;
- this._readableState.reading = false;
- this._readableState.ended = false;
- this._readableState.endEmitted = false;
- }
-
- if (this._writableState) {
- this._writableState.destroyed = false;
- this._writableState.ended = false;
- this._writableState.ending = false;
- this._writableState.finalCalled = false;
- this._writableState.prefinished = false;
- this._writableState.finished = false;
- this._writableState.errorEmitted = false;
+ if (w) {
+ w.closeEmitted = true
+ }
+
+ if (r) {
+ r.closeEmitted = true
+ }
+
+ if ((w && w.emitClose) || (r && r.emitClose)) {
+ self.emit('close')
}
}
function emitErrorNT(self, err) {
- self.emit('error', err);
+ const r = self._readableState
+ const w = self._writableState
+
+ if ((w && w.errorEmitted) || (r && r.errorEmitted)) {
+ return
+ }
+
+ if (w) {
+ w.errorEmitted = true
+ }
+
+ if (r) {
+ r.errorEmitted = true
+ }
+
+ self.emit('error', err)
}
-function errorOrDestroy(stream, err) {
+function undestroy() {
+ const r = this._readableState
+ const w = this._writableState
+
+ if (r) {
+ r.constructed = true
+ r.closed = false
+ r.closeEmitted = false
+ r.destroyed = false
+ r.errored = null
+ r.errorEmitted = false
+ r.reading = false
+ r.ended = r.readable === false
+ r.endEmitted = r.readable === false
+ }
+
+ if (w) {
+ w.constructed = true
+ w.destroyed = false
+ w.closed = false
+ w.closeEmitted = false
+ w.errored = null
+ w.errorEmitted = false
+ w.finalCalled = false
+ w.prefinished = false
+ w.ended = w.writable === false
+ w.ending = w.writable === false
+ w.finished = w.writable === false
+ }
+}
+
+function errorOrDestroy(stream, err, sync) {
// We have tests that rely on errors being emitted
// in the same tick, so changing this is semver major.
// For now when you opt-in to autoDestroy we allow
// the error to be emitted nextTick. In a future
// semver major update we should change the default to this.
- var rState = stream._readableState;
- var wState = stream._writableState;
- if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
+ const r = stream._readableState
+ const w = stream._writableState
+
+ if ((w && w.destroyed) || (r && r.destroyed)) {
+ return this
+ }
+
+ if ((r && r.autoDestroy) || (w && w.autoDestroy)) stream.destroy(err)
+ else if (err) {
+ // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
+ err.stack // eslint-disable-line no-unused-expressions
+
+ if (w && !w.errored) {
+ w.errored = err
+ }
+
+ if (r && !r.errored) {
+ r.errored = err
+ }
+
+ if (sync) {
+ process.nextTick(emitErrorNT, stream, err)
+ } else {
+ emitErrorNT(stream, err)
+ }
+ }
+}
+
+function construct(stream, cb) {
+ if (typeof stream._construct !== 'function') {
+ return
+ }
+
+ const r = stream._readableState
+ const w = stream._writableState
+
+ if (r) {
+ r.constructed = false
+ }
+
+ if (w) {
+ w.constructed = false
+ }
+
+ stream.once(kConstruct, cb)
+
+ if (stream.listenerCount(kConstruct) > 1) {
+ // Duplex
+ return
+ }
+
+ process.nextTick(constructNT, stream)
+}
+
+function constructNT(stream) {
+ let called = false
+
+ function onConstruct(err) {
+ if (called) {
+ errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK())
+ return
+ }
+
+ called = true
+ const r = stream._readableState
+ const w = stream._writableState
+ const s = w || r
+
+ if (r) {
+ r.constructed = true
+ }
+
+ if (w) {
+ w.constructed = true
+ }
+
+ if (s.destroyed) {
+ stream.emit(kDestroy, err)
+ } else if (err) {
+ errorOrDestroy(stream, err, true)
+ } else {
+ process.nextTick(emitConstructNT, stream)
+ }
+ }
+
+ try {
+ stream._construct(onConstruct)
+ } catch (err) {
+ onConstruct(err)
+ }
+}
+
+function emitConstructNT(stream) {
+ stream.emit(kConstruct)
+}
+
+function isRequest(stream) {
+ return stream && stream.setHeader && typeof stream.abort === 'function'
+}
+
+function emitCloseLegacy(stream) {
+ stream.emit('close')
+}
+
+function emitErrorCloseLegacy(stream, err) {
+ stream.emit('error', err)
+ process.nextTick(emitCloseLegacy, stream)
+} // Normalize destroy for legacy.
+
+function destroyer(stream, err) {
+ if (!stream || isDestroyed(stream)) {
+ return
+ }
+
+ if (!err && !isFinished(stream)) {
+ err = new AbortError()
+ } // TODO: Remove isRequest branches.
+
+ if (isServerRequest(stream)) {
+ stream.socket = null
+ stream.destroy(err)
+ } else if (isRequest(stream)) {
+ stream.abort()
+ } else if (isRequest(stream.req)) {
+ stream.req.abort()
+ } else if (typeof stream.destroy === 'function') {
+ stream.destroy(err)
+ } else if (typeof stream.close === 'function') {
+ // TODO: Don't lose err?
+ stream.close()
+ } else if (err) {
+ process.nextTick(emitErrorCloseLegacy, stream)
+ } else {
+ process.nextTick(emitCloseLegacy, stream)
+ }
+
+ if (!stream.destroyed) {
+ stream[kDestroyed] = true
+ }
}
module.exports = {
- destroy: destroy,
- undestroy: undestroy,
- errorOrDestroy: errorOrDestroy
-};
\ No newline at end of file
+ construct,
+ destroyer,
+ destroy,
+ undestroy,
+ errorOrDestroy
+}
diff --git a/lib/internal/streams/duplex.js b/lib/internal/streams/duplex.js
new file mode 100644
index 0000000000..a0d6f6bd73
--- /dev/null
+++ b/lib/internal/streams/duplex.js
@@ -0,0 +1,128 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototype inheritance, this class
+// prototypically inherits from Readable, and then parasitically from
+// Writable.
+'use strict'
+
+const {
+ ObjectDefineProperties,
+ ObjectGetOwnPropertyDescriptor,
+ ObjectKeys,
+ ObjectSetPrototypeOf
+} = require('../../ours/primordials')
+
+module.exports = Duplex
+
+const Readable = require('./readable')
+
+const Writable = require('./writable')
+
+ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype)
+ObjectSetPrototypeOf(Duplex, Readable)
+{
+ const keys = ObjectKeys(Writable.prototype) // Allow the keys array to be GC'ed.
+
+ for (let i = 0; i < keys.length; i++) {
+ const method = keys[i]
+ if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]
+ }
+}
+
+function Duplex(options) {
+ if (!(this instanceof Duplex)) return new Duplex(options)
+ Readable.call(this, options)
+ Writable.call(this, options)
+
+ if (options) {
+ this.allowHalfOpen = options.allowHalfOpen !== false
+
+ if (options.readable === false) {
+ this._readableState.readable = false
+ this._readableState.ended = true
+ this._readableState.endEmitted = true
+ }
+
+ if (options.writable === false) {
+ this._writableState.writable = false
+ this._writableState.ending = true
+ this._writableState.ended = true
+ this._writableState.finished = true
+ }
+ } else {
+ this.allowHalfOpen = true
+ }
+}
+
+ObjectDefineProperties(Duplex.prototype, {
+ writable: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable'),
+ writableHighWaterMark: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark'),
+ writableObjectMode: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode'),
+ writableBuffer: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer'),
+ writableLength: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength'),
+ writableFinished: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished'),
+ writableCorked: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked'),
+ writableEnded: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded'),
+ writableNeedDrain: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain'),
+ destroyed: {
+ get() {
+ if (this._readableState === undefined || this._writableState === undefined) {
+ return false
+ }
+
+ return this._readableState.destroyed && this._writableState.destroyed
+ },
+
+ set(value) {
+ // Backward compatibility, the user is explicitly
+ // managing destroyed.
+ if (this._readableState && this._writableState) {
+ this._readableState.destroyed = value
+ this._writableState.destroyed = value
+ }
+ }
+ }
+})
+let webStreamsAdapters // Lazy to avoid circular references
+
+function lazyWebStreams() {
+ if (webStreamsAdapters === undefined) webStreamsAdapters = {}
+ return webStreamsAdapters
+}
+
+Duplex.fromWeb = function (pair, options) {
+ return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options)
+}
+
+Duplex.toWeb = function (duplex) {
+ return lazyWebStreams().newReadableWritablePairFromDuplex(duplex)
+}
+
+let duplexify
+
+Duplex.from = function (body) {
+ if (!duplexify) {
+ duplexify = require('./duplexify')
+ }
+
+ return duplexify(body, 'body')
+}
diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js
new file mode 100644
index 0000000000..5250ad0195
--- /dev/null
+++ b/lib/internal/streams/duplexify.js
@@ -0,0 +1,422 @@
+'use strict'
+
+const abortControllerModule = require('abort-controller')
+
+const bufferModule = require('buffer')
+
+const {
+ isReadable,
+ isWritable,
+ isIterable,
+ isNodeStream,
+ isReadableNodeStream,
+ isWritableNodeStream,
+ isDuplexNodeStream
+} = require('./utils')
+
+const eos = require('./end-of-stream')
+
+const {
+ AbortError,
+ codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }
+} = require('../../ours/errors')
+
+const { destroyer } = require('./destroy')
+
+const Duplex = require('./duplex')
+
+const Readable = require('./readable')
+
+const { createDeferredPromise } = require('../../ours/util')
+
+const from = require('./from')
+
+const Blob = globalThis.Blob || bufferModule.Blob
+const isBlob =
+ typeof Blob !== 'undefined'
+ ? function isBlob(b) {
+ return b instanceof Blob
+ }
+ : function isBlob(b) {
+ return false
+ }
+const AbortController = globalThis.AbortController || abortControllerModule.AbortController
+
+const { FunctionPrototypeCall } = require('../../ours/primordials') // This is needed for pre node 17.
+
+class Duplexify extends Duplex {
+ constructor(options) {
+ super(options) // https://github.com/nodejs/node/pull/34385
+
+ if ((options === null || options === undefined ? undefined : options.readable) === false) {
+ this._readableState.readable = false
+ this._readableState.ended = true
+ this._readableState.endEmitted = true
+ }
+
+ if ((options === null || options === undefined ? undefined : options.writable) === false) {
+ this._writableState.writable = false
+ this._writableState.ending = true
+ this._writableState.ended = true
+ this._writableState.finished = true
+ }
+ }
+}
+
+module.exports = function duplexify(body, name) {
+ if (isDuplexNodeStream(body)) {
+ return body
+ }
+
+ if (isReadableNodeStream(body)) {
+ return _duplexify({
+ readable: body
+ })
+ }
+
+ if (isWritableNodeStream(body)) {
+ return _duplexify({
+ writable: body
+ })
+ }
+
+ if (isNodeStream(body)) {
+ return _duplexify({
+ writable: false,
+ readable: false
+ })
+ } // TODO: Webstreams
+ // if (isReadableStream(body)) {
+ // return _duplexify({ readable: Readable.fromWeb(body) });
+ // }
+ // TODO: Webstreams
+ // if (isWritableStream(body)) {
+ // return _duplexify({ writable: Writable.fromWeb(body) });
+ // }
+
+ if (typeof body === 'function') {
+ const { value, write, final, destroy } = fromAsyncGen(body)
+
+ if (isIterable(value)) {
+ return from(Duplexify, value, {
+ // TODO (ronag): highWaterMark?
+ objectMode: true,
+ write,
+ final,
+ destroy
+ })
+ }
+
+ const then = value === null || value === undefined ? undefined : value.then
+
+ if (typeof then === 'function') {
+ let d
+ const promise = FunctionPrototypeCall(
+ then,
+ value,
+ (val) => {
+ if (val != null) {
+ throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val)
+ }
+ },
+ (err) => {
+ destroyer(d, err)
+ }
+ )
+ return (d = new Duplexify({
+ // TODO (ronag): highWaterMark?
+ objectMode: true,
+ readable: false,
+ write,
+
+ final(cb) {
+ final(async () => {
+ try {
+ await promise
+ process.nextTick(cb, null)
+ } catch (err) {
+ process.nextTick(cb, err)
+ }
+ })
+ },
+
+ destroy
+ }))
+ }
+
+ throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value)
+ }
+
+ if (isBlob(body)) {
+ return duplexify(body.arrayBuffer())
+ }
+
+ if (isIterable(body)) {
+ return from(Duplexify, body, {
+ // TODO (ronag): highWaterMark?
+ objectMode: true,
+ writable: false
+ })
+ } // TODO: Webstreams.
+ // if (
+ // isReadableStream(body?.readable) &&
+ // isWritableStream(body?.writable)
+ // ) {
+ // return Duplexify.fromWeb(body);
+ // }
+
+ if (
+ typeof (body === null || body === undefined ? undefined : body.writable) === 'object' ||
+ typeof (body === null || body === undefined ? undefined : body.readable) === 'object'
+ ) {
+ const readable =
+ body !== null && body !== undefined && body.readable
+ ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable)
+ ? body === null || body === undefined
+ ? undefined
+ : body.readable
+ : duplexify(body.readable)
+ : undefined
+ const writable =
+ body !== null && body !== undefined && body.writable
+ ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable)
+ ? body === null || body === undefined
+ ? undefined
+ : body.writable
+ : duplexify(body.writable)
+ : undefined
+ return _duplexify({
+ readable,
+ writable
+ })
+ }
+
+ const then = body === null || body === undefined ? undefined : body.then
+
+ if (typeof then === 'function') {
+ let d
+ FunctionPrototypeCall(
+ then,
+ body,
+ (val) => {
+ if (val != null) {
+ d.push(val)
+ }
+
+ d.push(null)
+ },
+ (err) => {
+ destroyer(d, err)
+ }
+ )
+ return (d = new Duplexify({
+ objectMode: true,
+ writable: false,
+
+ read() {}
+ }))
+ }
+
+ throw new ERR_INVALID_ARG_TYPE(
+ name,
+ [
+ 'Blob',
+ 'ReadableStream',
+ 'WritableStream',
+ 'Stream',
+ 'Iterable',
+ 'AsyncIterable',
+ 'Function',
+ '{ readable, writable } pair',
+ 'Promise'
+ ],
+ body
+ )
+}
+
+function fromAsyncGen(fn) {
+ let { promise, resolve } = createDeferredPromise()
+ const ac = new AbortController()
+ const signal = ac.signal
+ const value = fn(
+ (async function* () {
+ while (true) {
+ const _promise = promise
+ promise = null
+ const { chunk, done, cb } = await _promise
+ process.nextTick(cb)
+ if (done) return
+ if (signal.aborted)
+ throw new AbortError(undefined, {
+ cause: signal.reason
+ })
+ ;({ promise, resolve } = createDeferredPromise())
+ yield chunk
+ }
+ })(),
+ {
+ signal
+ }
+ )
+ return {
+ value,
+
+ write(chunk, encoding, cb) {
+ const _resolve = resolve
+ resolve = null
+
+ _resolve({
+ chunk,
+ done: false,
+ cb
+ })
+ },
+
+ final(cb) {
+ const _resolve = resolve
+ resolve = null
+
+ _resolve({
+ done: true,
+ cb
+ })
+ },
+
+ destroy(err, cb) {
+ ac.abort()
+ cb(err)
+ }
+ }
+}
+
+function _duplexify(pair) {
+ const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable
+ const w = pair.writable
+ let readable = !!isReadable(r)
+ let writable = !!isWritable(w)
+ let ondrain
+ let onfinish
+ let onreadable
+ let onclose
+ let d
+
+ function onfinished(err) {
+ const cb = onclose
+ onclose = null
+
+ if (cb) {
+ cb(err)
+ } else if (err) {
+ d.destroy(err)
+ } else if (!readable && !writable) {
+ d.destroy()
+ }
+ } // TODO(ronag): Avoid double buffering.
+ // Implement Writable/Readable/Duplex traits.
+ // See, https://github.com/nodejs/node/pull/33515.
+
+ d = new Duplexify({
+ // TODO (ronag): highWaterMark?
+ readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode),
+ writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode),
+ readable,
+ writable
+ })
+
+ if (writable) {
+ eos(w, (err) => {
+ writable = false
+
+ if (err) {
+ destroyer(r, err)
+ }
+
+ onfinished(err)
+ })
+
+ d._write = function (chunk, encoding, callback) {
+ if (w.write(chunk, encoding)) {
+ callback()
+ } else {
+ ondrain = callback
+ }
+ }
+
+ d._final = function (callback) {
+ w.end()
+ onfinish = callback
+ }
+
+ w.on('drain', function () {
+ if (ondrain) {
+ const cb = ondrain
+ ondrain = null
+ cb()
+ }
+ })
+ w.on('finish', function () {
+ if (onfinish) {
+ const cb = onfinish
+ onfinish = null
+ cb()
+ }
+ })
+ }
+
+ if (readable) {
+ eos(r, (err) => {
+ readable = false
+
+ if (err) {
+ destroyer(r, err)
+ }
+
+ onfinished(err)
+ })
+ r.on('readable', function () {
+ if (onreadable) {
+ const cb = onreadable
+ onreadable = null
+ cb()
+ }
+ })
+ r.on('end', function () {
+ d.push(null)
+ })
+
+ d._read = function () {
+ while (true) {
+ const buf = r.read()
+
+ if (buf === null) {
+ onreadable = d._read
+ return
+ }
+
+ if (!d.push(buf)) {
+ return
+ }
+ }
+ }
+ }
+
+ d._destroy = function (err, callback) {
+ if (!err && onclose !== null) {
+ err = new AbortError()
+ }
+
+ onreadable = null
+ ondrain = null
+ onfinish = null
+
+ if (onclose === null) {
+ callback(err)
+ } else {
+ onclose = callback
+ destroyer(w, err)
+ destroyer(r, err)
+ }
+ }
+
+ return d
+}
diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js
index 831f286d98..3bafd53b4c 100644
--- a/lib/internal/streams/end-of-stream.js
+++ b/lib/internal/streams/end-of-stream.js
@@ -1,104 +1,258 @@
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).
-'use strict';
+'use strict'
-var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
+const { AbortError, codes } = require('../../ours/errors')
-function once(callback) {
- var called = false;
- return function () {
- if (called) return;
- called = true;
+const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes
- for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
- args[_key] = arguments[_key];
- }
+const { once } = require('../../ours/util')
- callback.apply(this, args);
- };
-}
+const { validateAbortSignal, validateFunction, validateObject } = require('../validators')
-function noop() {}
+const { Promise } = require('../../ours/primordials')
+
+const {
+ isClosed,
+ isReadable,
+ isReadableNodeStream,
+ isReadableFinished,
+ isReadableErrored,
+ isWritable,
+ isWritableNodeStream,
+ isWritableFinished,
+ isWritableErrored,
+ isNodeStream,
+ willEmitClose: _willEmitClose
+} = require('./utils')
function isRequest(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
+ return stream.setHeader && typeof stream.abort === 'function'
}
-function eos(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
- callback = once(callback || noop);
- var readable = opts.readable || opts.readable !== false && stream.readable;
- var writable = opts.writable || opts.writable !== false && stream.writable;
+const nop = () => {}
- var onlegacyfinish = function onlegacyfinish() {
- if (!stream.writable) onfinish();
- };
+function eos(stream, options, callback) {
+ var _options$readable, _options$writable
- var writableEnded = stream._writableState && stream._writableState.finished;
+ if (arguments.length === 2) {
+ callback = options
+ options = {}
+ } else if (options == null) {
+ options = {}
+ } else {
+ validateObject(options, 'options')
+ }
+
+ validateFunction(callback, 'callback')
+ validateAbortSignal(options.signal, 'options.signal')
+ callback = once(callback)
+ const readable =
+ (_options$readable = options.readable) !== null && _options$readable !== undefined
+ ? _options$readable
+ : isReadableNodeStream(stream)
+ const writable =
+ (_options$writable = options.writable) !== null && _options$writable !== undefined
+ ? _options$writable
+ : isWritableNodeStream(stream)
+
+ if (!isNodeStream(stream)) {
+ // TODO: Webstreams.
+ throw new ERR_INVALID_ARG_TYPE('stream', 'Stream', stream)
+ }
- var onfinish = function onfinish() {
- writable = false;
- writableEnded = true;
- if (!readable) callback.call(stream);
- };
+ const wState = stream._writableState
+ const rState = stream._readableState
- var readableEnded = stream._readableState && stream._readableState.endEmitted;
+ const onlegacyfinish = () => {
+ if (!stream.writable) {
+ onfinish()
+ }
+ } // TODO (ronag): Improve soft detection to include core modules and
+ // common ecosystem modules that do properly emit 'close' but fail
+ // this generic check.
- var onend = function onend() {
- readable = false;
- readableEnded = true;
- if (!writable) callback.call(stream);
- };
+ let willEmitClose =
+ _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable
+ let writableFinished = isWritableFinished(stream, false)
- var onerror = function onerror(err) {
- callback.call(stream, err);
- };
+ const onfinish = () => {
+ writableFinished = true // Stream should not be destroyed here. If it is that
+ // means that user space is doing something differently and
+ // we cannot trust willEmitClose.
- var onclose = function onclose() {
- var err;
+ if (stream.destroyed) {
+ willEmitClose = false
+ }
- if (readable && !readableEnded) {
- if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
- return callback.call(stream, err);
+ if (willEmitClose && (!stream.readable || readable)) {
+ return
}
- if (writable && !writableEnded) {
- if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
- return callback.call(stream, err);
+ if (!readable || readableFinished) {
+ callback.call(stream)
}
- };
+ }
+
+ let readableFinished = isReadableFinished(stream, false)
- var onrequest = function onrequest() {
- stream.req.on('finish', onfinish);
- };
+ const onend = () => {
+ readableFinished = true // Stream should not be destroyed here. If it is that
+ // means that user space is doing something differently and
+ // we cannot trust willEmitClose.
+
+ if (stream.destroyed) {
+ willEmitClose = false
+ }
+
+ if (willEmitClose && (!stream.writable || writable)) {
+ return
+ }
+
+ if (!writable || writableFinished) {
+ callback.call(stream)
+ }
+ }
+
+ const onerror = (err) => {
+ callback.call(stream, err)
+ }
+
+ let closed = isClosed(stream)
+
+ const onclose = () => {
+ closed = true
+ const errored = isWritableErrored(stream) || isReadableErrored(stream)
+
+ if (errored && typeof errored !== 'boolean') {
+ return callback.call(stream, errored)
+ }
+
+ if (readable && !readableFinished && isReadableNodeStream(stream, true)) {
+ if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
+ }
+
+ if (writable && !writableFinished) {
+ if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
+ }
+
+ callback.call(stream)
+ }
+
+ const onrequest = () => {
+ stream.req.on('finish', onfinish)
+ }
if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();else stream.on('request', onrequest);
- } else if (writable && !stream._writableState) {
+ stream.on('complete', onfinish)
+
+ if (!willEmitClose) {
+ stream.on('abort', onclose)
+ }
+
+ if (stream.req) {
+ onrequest()
+ } else {
+ stream.on('request', onrequest)
+ }
+ } else if (writable && !wState) {
// legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
+ stream.on('end', onlegacyfinish)
+ stream.on('close', onlegacyfinish)
+ } // Not all streams will emit 'close' after 'aborted'.
+
+ if (!willEmitClose && typeof stream.aborted === 'boolean') {
+ stream.on('aborted', onclose)
+ }
+
+ stream.on('end', onend)
+ stream.on('finish', onfinish)
+
+ if (options.error !== false) {
+ stream.on('error', onerror)
}
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', onerror);
- stream.on('close', onclose);
- return function () {
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('end', onend);
- stream.removeListener('error', onerror);
- stream.removeListener('close', onclose);
- };
+ stream.on('close', onclose)
+
+ if (closed) {
+ process.nextTick(onclose)
+ } else if (
+ (wState !== null && wState !== undefined && wState.errorEmitted) ||
+ (rState !== null && rState !== undefined && rState.errorEmitted)
+ ) {
+ if (!willEmitClose) {
+ process.nextTick(onclose)
+ }
+ } else if (
+ !readable &&
+ (!willEmitClose || isReadable(stream)) &&
+ (writableFinished || isWritable(stream) === false)
+ ) {
+ process.nextTick(onclose)
+ } else if (
+ !writable &&
+ (!willEmitClose || isWritable(stream)) &&
+ (readableFinished || isReadable(stream) === false)
+ ) {
+ process.nextTick(onclose)
+ } else if (rState && stream.req && stream.aborted) {
+ process.nextTick(onclose)
+ }
+
+ const cleanup = () => {
+ callback = nop
+ stream.removeListener('aborted', onclose)
+ stream.removeListener('complete', onfinish)
+ stream.removeListener('abort', onclose)
+ stream.removeListener('request', onrequest)
+ if (stream.req) stream.req.removeListener('finish', onfinish)
+ stream.removeListener('end', onlegacyfinish)
+ stream.removeListener('close', onlegacyfinish)
+ stream.removeListener('finish', onfinish)
+ stream.removeListener('end', onend)
+ stream.removeListener('error', onerror)
+ stream.removeListener('close', onclose)
+ }
+
+ if (options.signal && !closed) {
+ const abort = () => {
+ // Keep it because cleanup removes it.
+ const endCallback = callback
+ cleanup()
+ endCallback.call(
+ stream,
+ new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ )
+ }
+
+ if (options.signal.aborted) {
+ process.nextTick(abort)
+ } else {
+ const originalCallback = callback
+ callback = once((...args) => {
+ options.signal.removeEventListener('abort', abort)
+ originalCallback.apply(stream, args)
+ })
+ options.signal.addEventListener('abort', abort)
+ }
+ }
+
+ return cleanup
+}
+
+function finished(stream, opts) {
+ return new Promise((resolve, reject) => {
+ eos(stream, opts, (err) => {
+ if (err) {
+ reject(err)
+ } else {
+ resolve()
+ }
+ })
+ })
}
-module.exports = eos;
\ No newline at end of file
+module.exports = eos
+module.exports.finished = finished
diff --git a/lib/internal/streams/from-browser.js b/lib/internal/streams/from-browser.js
deleted file mode 100644
index a4ce56f3c9..0000000000
--- a/lib/internal/streams/from-browser.js
+++ /dev/null
@@ -1,3 +0,0 @@
-module.exports = function () {
- throw new Error('Readable.from is not available in the browser')
-};
diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js
index 6c41284416..6d40ac0aeb 100644
--- a/lib/internal/streams/from.js
+++ b/lib/internal/streams/from.js
@@ -1,64 +1,108 @@
-'use strict';
+'use strict'
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials')
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
-
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+function from(Readable, iterable, opts) {
+ let iterator
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+ if (typeof iterable === 'string' || iterable instanceof Buffer) {
+ return new Readable({
+ objectMode: true,
+ ...opts,
-var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;
+ read() {
+ this.push(iterable)
+ this.push(null)
+ }
+ })
+ }
-function from(Readable, iterable, opts) {
- var iterator;
+ let isAsync
- if (iterable && typeof iterable.next === 'function') {
- iterator = iterable;
- } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);
+ if (iterable && iterable[SymbolAsyncIterator]) {
+ isAsync = true
+ iterator = iterable[SymbolAsyncIterator]()
+ } else if (iterable && iterable[SymbolIterator]) {
+ isAsync = false
+ iterator = iterable[SymbolIterator]()
+ } else {
+ throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable)
+ }
- var readable = new Readable(_objectSpread({
- objectMode: true
- }, opts)); // Reading boolean to protect against _read
+ const readable = new Readable({
+ objectMode: true,
+ highWaterMark: 1,
+ // TODO(ronag): What options should be allowed?
+ ...opts
+ }) // Flag to protect against _read
// being called before last iteration completion.
- var reading = false;
+ let reading = false
readable._read = function () {
if (!reading) {
- reading = true;
- next();
+ reading = true
+ next()
}
- };
+ }
- function next() {
- return _next2.apply(this, arguments);
+ readable._destroy = function (error, cb) {
+ PromisePrototypeThen(
+ close(error),
+ () => process.nextTick(cb, error), // nextTick is here in case cb throws
+ (e) => process.nextTick(cb, e || error)
+ )
}
- function _next2() {
- _next2 = _asyncToGenerator(function* () {
+ async function close(error) {
+ const hadError = error !== undefined && error !== null
+ const hasThrow = typeof iterator.throw === 'function'
+
+ if (hadError && hasThrow) {
+ const { value, done } = await iterator.throw(error)
+ await value
+
+ if (done) {
+ return
+ }
+ }
+
+ if (typeof iterator.return === 'function') {
+ const { value } = await iterator.return()
+ await value
+ }
+ }
+
+ async function next() {
+ for (;;) {
try {
- var _ref = yield iterator.next(),
- value = _ref.value,
- done = _ref.done;
+ const { value, done } = isAsync ? await iterator.next() : iterator.next()
if (done) {
- readable.push(null);
- } else if (readable.push((yield value))) {
- next();
+ readable.push(null)
} else {
- reading = false;
+ const res = value && typeof value.then === 'function' ? await value : value
+
+ if (res === null) {
+ reading = false
+ throw new ERR_STREAM_NULL_VALUES()
+ } else if (readable.push(res)) {
+ continue
+ } else {
+ reading = false
+ }
}
} catch (err) {
- readable.destroy(err);
+ readable.destroy(err)
}
- });
- return _next2.apply(this, arguments);
+
+ break
+ }
}
- return readable;
+ return readable
}
-module.exports = from;
\ No newline at end of file
+module.exports = from
diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js
new file mode 100644
index 0000000000..06073b0d13
--- /dev/null
+++ b/lib/internal/streams/lazy_transform.js
@@ -0,0 +1,58 @@
+// LazyTransform is a special type of Transform stream that is lazily loaded.
+// This is used for performance with bi-API-ship: when two APIs are available
+// for the stream, one conventional and one non-conventional.
+'use strict'
+
+const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials')
+
+const stream = require('../../stream')
+
+const { getDefaultEncoding } = require('../crypto/util')
+
+module.exports = LazyTransform
+
+function LazyTransform(options) {
+ this._options = options
+}
+
+ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype)
+ObjectSetPrototypeOf(LazyTransform, stream.Transform)
+
+function makeGetter(name) {
+ return function () {
+ stream.Transform.call(this, this._options)
+ this._writableState.decodeStrings = false
+
+ if (!this._options || !this._options.defaultEncoding) {
+ this._writableState.defaultEncoding = getDefaultEncoding()
+ }
+
+ return this[name]
+ }
+}
+
+function makeSetter(name) {
+ return function (val) {
+ ObjectDefineProperty(this, name, {
+ value: val,
+ enumerable: true,
+ configurable: true,
+ writable: true
+ })
+ }
+}
+
+ObjectDefineProperties(LazyTransform.prototype, {
+ _readableState: {
+ get: makeGetter('_readableState'),
+ set: makeSetter('_readableState'),
+ configurable: true,
+ enumerable: true
+ },
+ _writableState: {
+ get: makeGetter('_writableState'),
+ set: makeSetter('_writableState'),
+ configurable: true,
+ enumerable: true
+ }
+})
diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js
new file mode 100644
index 0000000000..09c3b72013
--- /dev/null
+++ b/lib/internal/streams/legacy.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials')
+
+const { EventEmitter: EE } = require('events')
+
+function Stream(opts) {
+ EE.call(this, opts)
+}
+
+ObjectSetPrototypeOf(Stream.prototype, EE.prototype)
+ObjectSetPrototypeOf(Stream, EE)
+
+Stream.prototype.pipe = function (dest, options) {
+ const source = this
+
+ function ondata(chunk) {
+ if (dest.writable && dest.write(chunk) === false && source.pause) {
+ source.pause()
+ }
+ }
+
+ source.on('data', ondata)
+
+ function ondrain() {
+ if (source.readable && source.resume) {
+ source.resume()
+ }
+ }
+
+ dest.on('drain', ondrain) // If the 'end' option is not supplied, dest.end() will be called when
+ // source gets the 'end' or 'close' events. Only dest.end() once.
+
+ if (!dest._isStdio && (!options || options.end !== false)) {
+ source.on('end', onend)
+ source.on('close', onclose)
+ }
+
+ let didOnEnd = false
+
+ function onend() {
+ if (didOnEnd) return
+ didOnEnd = true
+ dest.end()
+ }
+
+ function onclose() {
+ if (didOnEnd) return
+ didOnEnd = true
+ if (typeof dest.destroy === 'function') dest.destroy()
+ } // Don't leave dangling pipes when there are errors.
+
+ function onerror(er) {
+ cleanup()
+
+ if (EE.listenerCount(this, 'error') === 0) {
+ this.emit('error', er)
+ }
+ }
+
+ prependListener(source, 'error', onerror)
+ prependListener(dest, 'error', onerror) // Remove all the event listeners that were added.
+
+ function cleanup() {
+ source.removeListener('data', ondata)
+ dest.removeListener('drain', ondrain)
+ source.removeListener('end', onend)
+ source.removeListener('close', onclose)
+ source.removeListener('error', onerror)
+ dest.removeListener('error', onerror)
+ source.removeListener('end', cleanup)
+ source.removeListener('close', cleanup)
+ dest.removeListener('close', cleanup)
+ }
+
+ source.on('end', cleanup)
+ source.on('close', cleanup)
+ dest.on('close', cleanup)
+ dest.emit('pipe', source) // Allow for unix-like usage: A.pipe(B).pipe(C)
+
+ return dest
+}
+
+function prependListener(emitter, event, fn) {
+ // Sadly this is not cacheable as some libraries bundle their own
+ // event emitter implementation with them.
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn) // This is a hack to make sure that our error handler is attached before any
+ // userland ones. NEVER DO THIS. This is here only because this code needs
+ // to continue to work with older versions of Node.js that do not include
+ // the prependListener() method. The goal is to eventually remove this hack.
+
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn)
+ else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn)
+ else emitter._events[event] = [fn, emitter._events[event]]
+}
+
+module.exports = {
+ Stream,
+ prependListener
+}
diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js
new file mode 100644
index 0000000000..fdea0ef709
--- /dev/null
+++ b/lib/internal/streams/operators.js
@@ -0,0 +1,537 @@
+'use strict'
+
+const abortControllerModule = require('abort-controller')
+
+const AbortController = globalThis.AbortController || abortControllerModule.AbortController
+
+const {
+ codes: { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE },
+ AbortError
+} = require('../../ours/errors')
+
+const { validateAbortSignal, validateInteger, validateObject } = require('../validators')
+
+const kWeakHandler = require('../../ours/primordials').Symbol('kWeak')
+
+const { finished } = require('./end-of-stream')
+
+const {
+ ArrayPrototypePush,
+ MathFloor,
+ Number,
+ NumberIsNaN,
+ Promise,
+ PromiseReject,
+ PromisePrototypeCatch,
+ Symbol
+} = require('../../ours/primordials')
+
+const kEmpty = Symbol('kEmpty')
+const kEof = Symbol('kEof')
+
+function map(fn, options) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+
+ let concurrency = 1
+
+ if ((options === null || options === undefined ? undefined : options.concurrency) != null) {
+ concurrency = MathFloor(options.concurrency)
+ }
+
+ validateInteger(concurrency, 'concurrency', 1)
+ return async function* map() {
+ var _options$signal, _options$signal2
+
+ const ac = new AbortController()
+ const stream = this
+ const queue = []
+ const signal = ac.signal
+ const signalOpt = {
+ signal
+ }
+
+ const abort = () => ac.abort()
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal = options.signal) !== null &&
+ _options$signal !== undefined &&
+ _options$signal.aborted
+ ) {
+ abort()
+ }
+
+ options === null || options === undefined
+ ? undefined
+ : (_options$signal2 = options.signal) === null || _options$signal2 === undefined
+ ? undefined
+ : _options$signal2.addEventListener('abort', abort)
+ let next
+ let resume
+ let done = false
+
+ function onDone() {
+ done = true
+ }
+
+ async function pump() {
+ try {
+ for await (let val of stream) {
+ var _val
+
+ if (done) {
+ return
+ }
+
+ if (signal.aborted) {
+ throw new AbortError()
+ }
+
+ try {
+ val = fn(val, signalOpt)
+ } catch (err) {
+ val = PromiseReject(err)
+ }
+
+ if (val === kEmpty) {
+ continue
+ }
+
+ if (typeof ((_val = val) === null || _val === undefined ? undefined : _val.catch) === 'function') {
+ val.catch(onDone)
+ }
+
+ queue.push(val)
+
+ if (next) {
+ next()
+ next = null
+ }
+
+ if (!done && queue.length && queue.length >= concurrency) {
+ await new Promise((resolve) => {
+ resume = resolve
+ })
+ }
+ }
+
+ queue.push(kEof)
+ } catch (err) {
+ const val = PromiseReject(err)
+ PromisePrototypeCatch(val, onDone)
+ queue.push(val)
+ } finally {
+ var _options$signal3
+
+ done = true
+
+ if (next) {
+ next()
+ next = null
+ }
+
+ options === null || options === undefined
+ ? undefined
+ : (_options$signal3 = options.signal) === null || _options$signal3 === undefined
+ ? undefined
+ : _options$signal3.removeEventListener('abort', abort)
+ }
+ }
+
+ pump()
+
+ try {
+ while (true) {
+ while (queue.length > 0) {
+ const val = await queue[0]
+
+ if (val === kEof) {
+ return
+ }
+
+ if (signal.aborted) {
+ throw new AbortError()
+ }
+
+ if (val !== kEmpty) {
+ yield val
+ }
+
+ queue.shift()
+
+ if (resume) {
+ resume()
+ resume = null
+ }
+ }
+
+ await new Promise((resolve) => {
+ next = resolve
+ })
+ }
+ } finally {
+ ac.abort()
+ done = true
+
+ if (resume) {
+ resume()
+ resume = null
+ }
+ }
+ }.call(this)
+}
+
+function asIndexedPairs(options = undefined) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+
+ return async function* asIndexedPairs() {
+ let index = 0
+
+ for await (const val of this) {
+ var _options$signal4
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal4 = options.signal) !== null &&
+ _options$signal4 !== undefined &&
+ _options$signal4.aborted
+ ) {
+ throw new AbortError({
+ cause: options.signal.reason
+ })
+ }
+
+ yield [index++, val]
+ }
+ }.call(this)
+}
+
+async function some(fn, options = undefined) {
+ // eslint-disable-next-line no-unused-vars
+ for await (const unused of filter.call(this, fn, options)) {
+ return true
+ }
+
+ return false
+}
+
+async function every(fn, options = undefined) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ } // https://en.wikipedia.org/wiki/De_Morgan%27s_laws
+
+ return !(await some.call(
+ this,
+ async (...args) => {
+ return !(await fn(...args))
+ },
+ options
+ ))
+}
+
+async function find(fn, options) {
+ for await (const result of filter.call(this, fn, options)) {
+ return result
+ }
+
+ return undefined
+}
+
+async function forEach(fn, options) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+
+ async function forEachFn(value, options) {
+ await fn(value, options)
+ return kEmpty
+ } // eslint-disable-next-line no-unused-vars
+
+ for await (const unused of map.call(this, forEachFn, options));
+}
+
+function filter(fn, options) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+
+ async function filterFn(value, options) {
+ if (await fn(value, options)) {
+ return value
+ }
+
+ return kEmpty
+ }
+
+ return map.call(this, filterFn, options)
+} // Specific to provide better error to reduce since the argument is only
+// missing if the stream has no items in it - but the code is still appropriate
+
+class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {
+ constructor() {
+ super('reduce')
+ this.message = 'Reduce of an empty stream requires an initial value'
+ }
+}
+
+async function reduce(reducer, initialValue, options) {
+ var _options$signal5
+
+ if (typeof reducer !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer)
+ }
+
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+
+ let hasInitialValue = arguments.length > 1
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal5 = options.signal) !== null &&
+ _options$signal5 !== undefined &&
+ _options$signal5.aborted
+ ) {
+ const err = new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ this.once('error', () => {}) // The error is already propagated
+
+ await finished(this.destroy(err))
+ throw err
+ }
+
+ const ac = new AbortController()
+ const signal = ac.signal
+
+ if (options !== null && options !== undefined && options.signal) {
+ const opts = {
+ once: true,
+ [kWeakHandler]: this
+ }
+ options.signal.addEventListener('abort', () => ac.abort(), opts)
+ }
+
+ let gotAnyItemFromStream = false
+
+ try {
+ for await (const value of this) {
+ var _options$signal6
+
+ gotAnyItemFromStream = true
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal6 = options.signal) !== null &&
+ _options$signal6 !== undefined &&
+ _options$signal6.aborted
+ ) {
+ throw new AbortError()
+ }
+
+ if (!hasInitialValue) {
+ initialValue = value
+ hasInitialValue = true
+ } else {
+ initialValue = await reducer(initialValue, value, {
+ signal
+ })
+ }
+ }
+
+ if (!gotAnyItemFromStream && !hasInitialValue) {
+ throw new ReduceAwareErrMissingArgs()
+ }
+ } finally {
+ ac.abort()
+ }
+
+ return initialValue
+}
+
+async function toArray(options) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+
+ const result = []
+
+ for await (const val of this) {
+ var _options$signal7
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal7 = options.signal) !== null &&
+ _options$signal7 !== undefined &&
+ _options$signal7.aborted
+ ) {
+ throw new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ }
+
+ ArrayPrototypePush(result, val)
+ }
+
+ return result
+}
+
+function flatMap(fn, options) {
+ const values = map.call(this, fn, options)
+ return async function* flatMap() {
+ for await (const val of values) {
+ yield* val
+ }
+ }.call(this)
+}
+
+function toIntegerOrInfinity(number) {
+ // We coerce here to align with the spec
+ // https://github.com/tc39/proposal-iterator-helpers/issues/169
+ number = Number(number)
+
+ if (NumberIsNaN(number)) {
+ return 0
+ }
+
+ if (number < 0) {
+ throw new ERR_OUT_OF_RANGE('number', '>= 0', number)
+ }
+
+ return number
+}
+
+function drop(number, options = undefined) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+
+ number = toIntegerOrInfinity(number)
+ return async function* drop() {
+ var _options$signal8
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal8 = options.signal) !== null &&
+ _options$signal8 !== undefined &&
+ _options$signal8.aborted
+ ) {
+ throw new AbortError()
+ }
+
+ for await (const val of this) {
+ var _options$signal9
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal9 = options.signal) !== null &&
+ _options$signal9 !== undefined &&
+ _options$signal9.aborted
+ ) {
+ throw new AbortError()
+ }
+
+ if (number-- <= 0) {
+ yield val
+ }
+ }
+ }.call(this)
+}
+
+function take(number, options = undefined) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+
+ number = toIntegerOrInfinity(number)
+ return async function* take() {
+ var _options$signal10
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal10 = options.signal) !== null &&
+ _options$signal10 !== undefined &&
+ _options$signal10.aborted
+ ) {
+ throw new AbortError()
+ }
+
+ for await (const val of this) {
+ var _options$signal11
+
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal11 = options.signal) !== null &&
+ _options$signal11 !== undefined &&
+ _options$signal11.aborted
+ ) {
+ throw new AbortError()
+ }
+
+ if (number-- > 0) {
+ yield val
+ } else {
+ return
+ }
+ }
+ }.call(this)
+}
+
+module.exports.streamReturningOperators = {
+ asIndexedPairs,
+ drop,
+ filter,
+ flatMap,
+ map,
+ take
+}
+module.exports.promiseReturningOperators = {
+ every,
+ forEach,
+ reduce,
+ toArray,
+ some,
+ find
+}
diff --git a/lib/internal/streams/passthrough.js b/lib/internal/streams/passthrough.js
new file mode 100644
index 0000000000..55c551723e
--- /dev/null
+++ b/lib/internal/streams/passthrough.js
@@ -0,0 +1,42 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+'use strict'
+
+const { ObjectSetPrototypeOf } = require('../../ours/primordials')
+
+module.exports = PassThrough
+
+const Transform = require('./transform')
+
+ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype)
+ObjectSetPrototypeOf(PassThrough, Transform)
+
+function PassThrough(options) {
+ if (!(this instanceof PassThrough)) return new PassThrough(options)
+ Transform.call(this, options)
+}
+
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
+ cb(null, chunk)
+}
diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js
index 6589909889..4e6e82e1c6 100644
--- a/lib/internal/streams/pipeline.js
+++ b/lib/internal/streams/pipeline.js
@@ -1,97 +1,415 @@
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
-'use strict';
+'use strict'
-var eos;
+const abortControllerModule = require('abort-controller')
-function once(callback) {
- var called = false;
- return function () {
- if (called) return;
- called = true;
- callback.apply(void 0, arguments);
- };
-}
+const { ArrayIsArray, Promise, SymbolAsyncIterator } = require('../../ours/primordials')
+
+const eos = require('./end-of-stream')
+
+const { once } = require('../../ours/util')
+
+const destroyImpl = require('./destroy')
+
+const Duplex = require('./duplex')
-var _require$codes = require('../../../errors').codes,
- ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
- ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
+const {
+ aggregateTwoErrors,
+ codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE, ERR_MISSING_ARGS, ERR_STREAM_DESTROYED },
+ AbortError
+} = require('../../ours/errors')
-function noop(err) {
- // Rethrow the error if it exists to avoid swallowing it
- if (err) throw err;
+const { validateFunction, validateAbortSignal } = require('../validators')
+
+const { isIterable, isReadable, isReadableNodeStream, isNodeStream } = require('./utils')
+
+const AbortController = globalThis.AbortController || abortControllerModule.AbortController
+let PassThrough
+let Readable
+
+function destroyer(stream, reading, writing) {
+ let finished = false
+ stream.on('close', () => {
+ finished = true
+ })
+ const cleanup = eos(
+ stream,
+ {
+ readable: reading,
+ writable: writing
+ },
+ (err) => {
+ finished = !err
+ }
+ )
+ return {
+ destroy: (err) => {
+ if (finished) return
+ finished = true
+ destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'))
+ },
+ cleanup
+ }
}
-function isRequest(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
+function popCallback(streams) {
+ // Streams should never be an empty array. It should always contain at least
+ // a single stream. Therefore optimize for the average case instead of
+ // checking for length === 0 as well.
+ validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]')
+ return streams.pop()
}
-function destroyer(stream, reading, writing, callback) {
- callback = once(callback);
- var closed = false;
- stream.on('close', function () {
- closed = true;
- });
- if (eos === undefined) eos = require('./end-of-stream');
- eos(stream, {
- readable: reading,
- writable: writing
- }, function (err) {
- if (err) return callback(err);
- closed = true;
- callback();
- });
- var destroyed = false;
- return function (err) {
- if (closed) return;
- if (destroyed) return;
- destroyed = true; // request.destroy just do .end - .abort is what we want
-
- if (isRequest(stream)) return stream.abort();
- if (typeof stream.destroy === 'function') return stream.destroy();
- callback(err || new ERR_STREAM_DESTROYED('pipe'));
- };
+function makeAsyncIterable(val) {
+ if (isIterable(val)) {
+ return val
+ } else if (isReadableNodeStream(val)) {
+ // Legacy streams are not Iterable.
+ return fromReadable(val)
+ }
+
+ throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val)
}
-function call(fn) {
- fn();
+async function* fromReadable(val) {
+ if (!Readable) {
+ Readable = require('./readable')
+ }
+
+ yield* Readable.prototype[SymbolAsyncIterator].call(val)
}
-function pipe(from, to) {
- return from.pipe(to);
+async function pump(iterable, writable, finish, { end }) {
+ let error
+ let onresolve = null
+
+ const resume = (err) => {
+ if (err) {
+ error = err
+ }
+
+ if (onresolve) {
+ const callback = onresolve
+ onresolve = null
+ callback()
+ }
+ }
+
+ const wait = () =>
+ new Promise((resolve, reject) => {
+ if (error) {
+ reject(error)
+ } else {
+ onresolve = () => {
+ if (error) {
+ reject(error)
+ } else {
+ resolve()
+ }
+ }
+ }
+ })
+
+ writable.on('drain', resume)
+ const cleanup = eos(
+ writable,
+ {
+ readable: false
+ },
+ resume
+ )
+
+ try {
+ if (writable.writableNeedDrain) {
+ await wait()
+ }
+
+ for await (const chunk of iterable) {
+ if (!writable.write(chunk)) {
+ await wait()
+ }
+ }
+
+ if (end) {
+ writable.end()
+ }
+
+ await wait()
+ finish()
+ } catch (err) {
+ finish(error !== err ? aggregateTwoErrors(error, err) : err)
+ } finally {
+ cleanup()
+ writable.off('drain', resume)
+ }
}
-function popCallback(streams) {
- if (!streams.length) return noop;
- if (typeof streams[streams.length - 1] !== 'function') return noop;
- return streams.pop();
+function pipeline(...streams) {
+ return pipelineImpl(streams, once(popCallback(streams)))
}
-function pipeline() {
- for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
- streams[_key] = arguments[_key];
+function pipelineImpl(streams, callback, opts) {
+ if (streams.length === 1 && ArrayIsArray(streams[0])) {
+ streams = streams[0]
}
- var callback = popCallback(streams);
- if (Array.isArray(streams[0])) streams = streams[0];
-
if (streams.length < 2) {
- throw new ERR_MISSING_ARGS('streams');
+ throw new ERR_MISSING_ARGS('streams')
+ }
+
+ const ac = new AbortController()
+ const signal = ac.signal
+ const outerSignal = opts === null || opts === undefined ? undefined : opts.signal // Need to cleanup event listeners if last stream is readable
+ // https://github.com/nodejs/node/issues/35452
+
+ const lastStreamCleanup = []
+ validateAbortSignal(outerSignal, 'options.signal')
+
+ function abort() {
+ finishImpl(new AbortError())
}
- var error;
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1;
- var writing = i > 0;
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err;
- if (err) destroys.forEach(call);
- if (reading) return;
- destroys.forEach(call);
- callback(error);
- });
- });
- return streams.reduce(pipe);
+ outerSignal === null || outerSignal === undefined ? undefined : outerSignal.addEventListener('abort', abort)
+ let error
+ let value
+ const destroys = []
+ let finishCount = 0
+
+ function finish(err) {
+ finishImpl(err, --finishCount === 0)
+ }
+
+ function finishImpl(err, final) {
+ if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {
+ error = err
+ }
+
+ if (!error && !final) {
+ return
+ }
+
+ while (destroys.length) {
+ destroys.shift()(error)
+ }
+
+ outerSignal === null || outerSignal === undefined ? undefined : outerSignal.removeEventListener('abort', abort)
+ ac.abort()
+
+ if (final) {
+ if (!error) {
+ lastStreamCleanup.forEach((fn) => fn())
+ }
+
+ process.nextTick(callback, error, value)
+ }
+ }
+
+ let ret
+
+ for (let i = 0; i < streams.length; i++) {
+ const stream = streams[i]
+ const reading = i < streams.length - 1
+ const writing = i > 0
+ const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false
+ const isLastStream = i === streams.length - 1
+
+ if (isNodeStream(stream)) {
+ if (end) {
+ const { destroy, cleanup } = destroyer(stream, reading, writing)
+ destroys.push(destroy)
+
+ if (isReadable(stream) && isLastStream) {
+ lastStreamCleanup.push(cleanup)
+ }
+ } // Catch stream errors that occur after pipe/pump has completed.
+
+ function onError(err) {
+ if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
+ finish(err)
+ }
+ }
+
+ stream.on('error', onError)
+
+ if (isReadable(stream) && isLastStream) {
+ lastStreamCleanup.push(() => {
+ stream.removeListener('error', onError)
+ })
+ }
+ }
+
+ if (i === 0) {
+ if (typeof stream === 'function') {
+ ret = stream({
+ signal
+ })
+
+ if (!isIterable(ret)) {
+ throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret)
+ }
+ } else if (isIterable(stream) || isReadableNodeStream(stream)) {
+ ret = stream
+ } else {
+ ret = Duplex.from(stream)
+ }
+ } else if (typeof stream === 'function') {
+ ret = makeAsyncIterable(ret)
+ ret = stream(ret, {
+ signal
+ })
+
+ if (reading) {
+ if (!isIterable(ret, true)) {
+ throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret)
+ }
+ } else {
+ var _ret
+
+ if (!PassThrough) {
+ PassThrough = require('./passthrough')
+ } // If the last argument to pipeline is not a stream
+ // we must create a proxy stream so that pipeline(...)
+ // always returns a stream which can be further
+ // composed through `.pipe(stream)`.
+
+ const pt = new PassThrough({
+ objectMode: true
+ }) // Handle Promises/A+ spec, `then` could be a getter that throws on
+ // second use.
+
+ const then = (_ret = ret) === null || _ret === undefined ? undefined : _ret.then
+
+ if (typeof then === 'function') {
+ finishCount++
+ then.call(
+ ret,
+ (val) => {
+ value = val
+
+ if (val != null) {
+ pt.write(val)
+ }
+
+ if (end) {
+ pt.end()
+ }
+
+ process.nextTick(finish)
+ },
+ (err) => {
+ pt.destroy(err)
+ process.nextTick(finish, err)
+ }
+ )
+ } else if (isIterable(ret, true)) {
+ finishCount++
+ pump(ret, pt, finish, {
+ end
+ })
+ } else {
+ throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret)
+ }
+
+ ret = pt
+ const { destroy, cleanup } = destroyer(ret, false, true)
+ destroys.push(destroy)
+
+ if (isLastStream) {
+ lastStreamCleanup.push(cleanup)
+ }
+ }
+ } else if (isNodeStream(stream)) {
+ if (isReadableNodeStream(ret)) {
+ finishCount += 2
+ const cleanup = pipe(ret, stream, finish, {
+ end
+ })
+
+ if (isReadable(stream) && isLastStream) {
+ lastStreamCleanup.push(cleanup)
+ }
+ } else if (isIterable(ret)) {
+ finishCount++
+ pump(ret, stream, finish, {
+ end
+ })
+ } else {
+ throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], ret)
+ }
+
+ ret = stream
+ } else {
+ ret = Duplex.from(stream)
+ }
+ }
+
+ if (
+ (signal !== null && signal !== undefined && signal.aborted) ||
+ (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted)
+ ) {
+ process.nextTick(abort)
+ }
+
+ return ret
+}
+
+function pipe(src, dst, finish, { end }) {
+ src.pipe(dst, {
+ end
+ })
+
+ if (end) {
+ // Compat. Before node v10.12.0 stdio used to throw an error so
+ // pipe() did/does not end() stdio destinations.
+ // Now they allow it but "secretly" don't close the underlying fd.
+ src.once('end', () => dst.end())
+ } else {
+ finish()
+ }
+
+ eos(
+ src,
+ {
+ readable: true,
+ writable: false
+ },
+ (err) => {
+ const rState = src._readableState
+
+ if (
+ err &&
+ err.code === 'ERR_STREAM_PREMATURE_CLOSE' &&
+ rState &&
+ rState.ended &&
+ !rState.errored &&
+ !rState.errorEmitted
+ ) {
+ // Some readable streams will emit 'close' before 'end'. However, since
+ // this is on the readable side 'end' should still be emitted if the
+ // stream has been ended and no error emitted. This should be allowed in
+ // favor of backwards compatibility. Since the stream is piped to a
+ // destination this should not result in any observable difference.
+ // We don't need to check if this is a writable premature close since
+ // eos will only fail with premature close on the reading side for
+ // duplex streams.
+ src.once('end', finish).once('error', finish)
+ } else {
+ finish(err)
+ }
+ }
+ )
+ return eos(
+ dst,
+ {
+ readable: false,
+ writable: true
+ },
+ finish
+ )
}
-module.exports = pipeline;
\ No newline at end of file
+module.exports = {
+ pipelineImpl,
+ pipeline
+}
diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js
new file mode 100644
index 0000000000..9c263c8a6e
--- /dev/null
+++ b/lib/internal/streams/readable.js
@@ -0,0 +1,1314 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
+
+const {
+ ArrayPrototypeIndexOf,
+ NumberIsInteger,
+ NumberIsNaN,
+ NumberParseInt,
+ ObjectDefineProperties,
+ ObjectKeys,
+ ObjectSetPrototypeOf,
+ Promise,
+ SafeSet,
+ SymbolAsyncIterator,
+ Symbol
+} = require('../../ours/primordials')
+
+module.exports = Readable
+Readable.ReadableState = ReadableState
+
+const { EventEmitter: EE } = require('events')
+
+const { Stream, prependListener } = require('./legacy')
+
+const { addAbortSignal } = require('./add-abort-signal')
+
+const eos = require('./end-of-stream')
+
+let debug = require('../../ours/util').debuglog('stream', (fn) => {
+ debug = fn
+})
+
+const BufferList = require('./buffer_list')
+
+const destroyImpl = require('./destroy')
+
+const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
+
+const {
+ aggregateTwoErrors,
+ codes: {
+ ERR_INVALID_ARG_TYPE,
+ ERR_METHOD_NOT_IMPLEMENTED,
+ ERR_OUT_OF_RANGE,
+ ERR_STREAM_PUSH_AFTER_EOF,
+ ERR_STREAM_UNSHIFT_AFTER_END_EVENT
+ }
+} = require('../../ours/errors')
+
+const { validateObject } = require('../validators')
+
+const kPaused = Symbol('kPaused')
+
+const { StringDecoder } = require('string_decoder')
+
+const from = require('./from')
+
+ObjectSetPrototypeOf(Readable.prototype, Stream.prototype)
+ObjectSetPrototypeOf(Readable, Stream)
+
+const nop = () => {}
+
+const { errorOrDestroy } = destroyImpl
+
+function ReadableState(options, stream, isDuplex) {
+ // Duplex streams are both readable and writable, but share
+ // the same options object.
+ // However, some cases require setting options to different
+ // values for the readable and the writable sides of the duplex stream.
+ // These options can be provided separately as readableXXX and writableXXX.
+ if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex') // Object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away.
+
+ this.objectMode = !!(options && options.objectMode)
+ if (isDuplex) this.objectMode = this.objectMode || !!(options && options.readableObjectMode) // The point at which it stops calling _read() to fill the buffer
+ // Note: 0 is a valid value, means "don't call _read preemptively ever"
+
+ this.highWaterMark = options
+ ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex)
+ : getDefaultHighWaterMark(false) // A linked list is used to store data chunks instead of an array because the
+ // linked list can remove elements from the beginning faster than
+ // array.shift().
+
+ this.buffer = new BufferList()
+ this.length = 0
+ this.pipes = []
+ this.flowing = null
+ this.ended = false
+ this.endEmitted = false
+ this.reading = false // Stream is still being constructed and cannot be
+ // destroyed until construction finished or failed.
+ // Async construction is opt in, therefore we start as
+ // constructed.
+
+ this.constructed = true // A flag to be able to tell if the event 'readable'/'data' is emitted
+ // immediately, or on a later tick. We set this to true at first, because
+ // any actions that shouldn't happen until "later" should generally also
+ // not happen before the first read call.
+
+ this.sync = true // Whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+
+ this.needReadable = false
+ this.emittedReadable = false
+ this.readableListening = false
+ this.resumeScheduled = false
+ this[kPaused] = null // True if the error was already emitted and should not be thrown again.
+
+ this.errorEmitted = false // Should close be emitted on destroy. Defaults to true.
+
+ this.emitClose = !options || options.emitClose !== false // Should .destroy() be called after 'end' (and potentially 'finish').
+
+ this.autoDestroy = !options || options.autoDestroy !== false // Has it been destroyed.
+
+ this.destroyed = false // Indicates whether the stream has errored. When true no further
+ // _read calls, 'data' or 'readable' events should occur. This is needed
+ // since when autoDestroy is disabled we need a way to tell whether the
+ // stream has failed.
+
+ this.errored = null // Indicates whether the stream has finished destroying.
+
+ this.closed = false // True if close has been emitted or would have been emitted
+ // depending on emitClose.
+
+ this.closeEmitted = false // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+
+ this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' // Ref the piped dest which we need a drain event on it
+ // type: null | Writable | Set.
+
+ this.awaitDrainWriters = null
+ this.multiAwaitDrain = false // If true, a maybeReadMore has been scheduled.
+
+ this.readingMore = false
+ this.dataEmitted = false
+ this.decoder = null
+ this.encoding = null
+
+ if (options && options.encoding) {
+ this.decoder = new StringDecoder(options.encoding)
+ this.encoding = options.encoding
+ }
+}
+
+function Readable(options) {
+ if (!(this instanceof Readable)) return new Readable(options) // Checking for a Stream.Duplex instance is faster here instead of inside
+ // the ReadableState constructor, at least with V8 6.5.
+
+ const isDuplex = this instanceof require('./duplex')
+
+ this._readableState = new ReadableState(options, this, isDuplex)
+
+ if (options) {
+ if (typeof options.read === 'function') this._read = options.read
+ if (typeof options.destroy === 'function') this._destroy = options.destroy
+ if (typeof options.construct === 'function') this._construct = options.construct
+ if (options.signal && !isDuplex) addAbortSignal(options.signal, this)
+ }
+
+ Stream.call(this, options)
+ destroyImpl.construct(this, () => {
+ if (this._readableState.needReadable) {
+ maybeReadMore(this, this._readableState)
+ }
+ })
+}
+
+Readable.prototype.destroy = destroyImpl.destroy
+Readable.prototype._undestroy = destroyImpl.undestroy
+
+Readable.prototype._destroy = function (err, cb) {
+ cb(err)
+}
+
+Readable.prototype[EE.captureRejectionSymbol] = function (err) {
+ this.destroy(err)
+} // Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+
+Readable.prototype.push = function (chunk, encoding) {
+ return readableAddChunk(this, chunk, encoding, false)
+} // Unshift should *always* be something directly out of read().
+
+Readable.prototype.unshift = function (chunk, encoding) {
+ return readableAddChunk(this, chunk, encoding, true)
+}
+
+function readableAddChunk(stream, chunk, encoding, addToFront) {
+ debug('readableAddChunk', chunk)
+ const state = stream._readableState
+ let err
+
+ if (!state.objectMode) {
+ if (typeof chunk === 'string') {
+ encoding = encoding || state.defaultEncoding
+
+ if (state.encoding !== encoding) {
+ if (addToFront && state.encoding) {
+ // When unshifting, if state.encoding is set, we have to save
+ // the string in the BufferList with the state encoding.
+ chunk = Buffer.from(chunk, encoding).toString(state.encoding)
+ } else {
+ chunk = Buffer.from(chunk, encoding)
+ encoding = ''
+ }
+ }
+ } else if (chunk instanceof Buffer) {
+ encoding = ''
+ } else if (Stream._isUint8Array(chunk)) {
+ chunk = Stream._uint8ArrayToBuffer(chunk)
+ encoding = ''
+ } else if (chunk != null) {
+ err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
+ }
+ }
+
+ if (err) {
+ errorOrDestroy(stream, err)
+ } else if (chunk === null) {
+ state.reading = false
+ onEofChunk(stream, state)
+ } else if (state.objectMode || (chunk && chunk.length > 0)) {
+ if (addToFront) {
+ if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT())
+ else if (state.destroyed || state.errored) return false
+ else addChunk(stream, state, chunk, true)
+ } else if (state.ended) {
+ errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF())
+ } else if (state.destroyed || state.errored) {
+ return false
+ } else {
+ state.reading = false
+
+ if (state.decoder && !encoding) {
+ chunk = state.decoder.write(chunk)
+ if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false)
+ else maybeReadMore(stream, state)
+ } else {
+ addChunk(stream, state, chunk, false)
+ }
+ }
+ } else if (!addToFront) {
+ state.reading = false
+ maybeReadMore(stream, state)
+ } // We can push more data if we are below the highWaterMark.
+ // Also, if we have no data yet, we can stand some more bytes.
+ // This is to work around cases where hwm=0, such as the repl.
+
+ return !state.ended && (state.length < state.highWaterMark || state.length === 0)
+}
+
+function addChunk(stream, state, chunk, addToFront) {
+ if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) {
+ // Use the guard to avoid creating `Set()` repeatedly
+ // when we have multiple pipes.
+ if (state.multiAwaitDrain) {
+ state.awaitDrainWriters.clear()
+ } else {
+ state.awaitDrainWriters = null
+ }
+
+ state.dataEmitted = true
+ stream.emit('data', chunk)
+ } else {
+ // Update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length
+ if (addToFront) state.buffer.unshift(chunk)
+ else state.buffer.push(chunk)
+ if (state.needReadable) emitReadable(stream)
+ }
+
+ maybeReadMore(stream, state)
+}
+
+Readable.prototype.isPaused = function () {
+ const state = this._readableState
+ return state[kPaused] === true || state.flowing === false
+} // Backwards compatibility.
+
+Readable.prototype.setEncoding = function (enc) {
+ const decoder = new StringDecoder(enc)
+ this._readableState.decoder = decoder // If setEncoding(null), decoder.encoding equals utf8.
+
+ this._readableState.encoding = this._readableState.decoder.encoding
+ const buffer = this._readableState.buffer // Iterate over current buffer to convert already stored Buffers:
+
+ let content = ''
+
+ for (const data of buffer) {
+ content += decoder.write(data)
+ }
+
+ buffer.clear()
+ if (content !== '') buffer.push(content)
+ this._readableState.length = content.length
+ return this
+} // Don't raise the hwm > 1GB.
+
+const MAX_HWM = 0x40000000
+
+function computeNewHighWaterMark(n) {
+ if (n > MAX_HWM) {
+ throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n)
+ } else {
+ // Get the next highest power of 2 to prevent increasing hwm excessively in
+ // tiny amounts.
+ n--
+ n |= n >>> 1
+ n |= n >>> 2
+ n |= n >>> 4
+ n |= n >>> 8
+ n |= n >>> 16
+ n++
+ }
+
+ return n
+} // This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+
+function howMuchToRead(n, state) {
+ if (n <= 0 || (state.length === 0 && state.ended)) return 0
+ if (state.objectMode) return 1
+
+ if (NumberIsNaN(n)) {
+ // Only flow one buffer at a time.
+ if (state.flowing && state.length) return state.buffer.first().length
+ return state.length
+ }
+
+ if (n <= state.length) return n
+ return state.ended ? state.length : 0
+} // You can override either this method, or the async _read(n) below.
+
+Readable.prototype.read = function (n) {
+ debug('read', n) // Same as parseInt(undefined, 10), however V8 7.3 performance regressed
+ // in this scenario, so we are doing it manually.
+
+ if (n === undefined) {
+ n = NaN
+ } else if (!NumberIsInteger(n)) {
+ n = NumberParseInt(n, 10)
+ }
+
+ const state = this._readableState
+ const nOrig = n // If we're asking for more than the current hwm, then raise the hwm.
+
+ if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n)
+ if (n !== 0) state.emittedReadable = false // If we're doing read(0) to trigger a readable event, but we
+ // already have a bunch of data in the buffer, then just trigger
+ // the 'readable' event and move on.
+
+ if (
+ n === 0 &&
+ state.needReadable &&
+ ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)
+ ) {
+ debug('read: emitReadable', state.length, state.ended)
+ if (state.length === 0 && state.ended) endReadable(this)
+ else emitReadable(this)
+ return null
+ }
+
+ n = howMuchToRead(n, state) // If we've ended, and we're now clear, then finish it up.
+
+ if (n === 0 && state.ended) {
+ if (state.length === 0) endReadable(this)
+ return null
+ } // All the actual chunk generation logic needs to be
+ // *below* the call to _read. The reason is that in certain
+ // synthetic stream cases, such as passthrough streams, _read
+ // may be a completely synchronous operation which may change
+ // the state of the read buffer, providing enough data when
+ // before there was *not* enough.
+ //
+ // So, the steps are:
+ // 1. Figure out what the state of things will be after we do
+ // a read from the buffer.
+ //
+ // 2. If that resulting state will trigger a _read, then call _read.
+ // Note that this may be asynchronous, or synchronous. Yes, it is
+ // deeply ugly to write APIs this way, but that still doesn't mean
+ // that the Readable class should behave improperly, as streams are
+ // designed to be sync/async agnostic.
+ // Take note if the _read call is sync or async (ie, if the read call
+ // has returned yet), so that we know whether or not it's safe to emit
+ // 'readable' etc.
+ //
+ // 3. Actually pull the requested chunks out of the buffer and return.
+ // if we need a readable event, then we need to do some reading.
+
+ let doRead = state.needReadable
+ debug('need readable', doRead) // If we currently have less than the highWaterMark, then also read some.
+
+ if (state.length === 0 || state.length - n < state.highWaterMark) {
+ doRead = true
+ debug('length less than watermark', doRead)
+ } // However, if we've ended, then there's no point, if we're already
+ // reading, then it's unnecessary, if we're constructing we have to wait,
+ // and if we're destroyed or errored, then it's not allowed,
+
+ if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) {
+ doRead = false
+ debug('reading, ended or constructing', doRead)
+ } else if (doRead) {
+ debug('do read')
+ state.reading = true
+ state.sync = true // If the length is currently zero, then we *need* a readable event.
+
+ if (state.length === 0) state.needReadable = true // Call internal read method
+
+ try {
+ this._read(state.highWaterMark)
+ } catch (err) {
+ errorOrDestroy(this, err)
+ }
+
+ state.sync = false // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
+
+ if (!state.reading) n = howMuchToRead(nOrig, state)
+ }
+
+ let ret
+ if (n > 0) ret = fromList(n, state)
+ else ret = null
+
+ if (ret === null) {
+ state.needReadable = state.length <= state.highWaterMark
+ n = 0
+ } else {
+ state.length -= n
+
+ if (state.multiAwaitDrain) {
+ state.awaitDrainWriters.clear()
+ } else {
+ state.awaitDrainWriters = null
+ }
+ }
+
+ if (state.length === 0) {
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (!state.ended) state.needReadable = true // If we tried to read() past the EOF, then emit end on the next tick.
+
+ if (nOrig !== n && state.ended) endReadable(this)
+ }
+
+ if (ret !== null && !state.errorEmitted && !state.closeEmitted) {
+ state.dataEmitted = true
+ this.emit('data', ret)
+ }
+
+ return ret
+}
+
+function onEofChunk(stream, state) {
+ debug('onEofChunk')
+ if (state.ended) return
+
+ if (state.decoder) {
+ const chunk = state.decoder.end()
+
+ if (chunk && chunk.length) {
+ state.buffer.push(chunk)
+ state.length += state.objectMode ? 1 : chunk.length
+ }
+ }
+
+ state.ended = true
+
+ if (state.sync) {
+ // If we are sync, wait until next tick to emit the data.
+ // Otherwise we risk emitting data in the flow()
+ // the readable code triggers during a read() call.
+ emitReadable(stream)
+ } else {
+ // Emit 'readable' now to make sure it gets picked up.
+ state.needReadable = false
+ state.emittedReadable = true // We have to emit readable now that we are EOF. Modules
+ // in the ecosystem (e.g. dicer) rely on this event being sync.
+
+ emitReadable_(stream)
+ }
+} // Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow. This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+
+function emitReadable(stream) {
+ const state = stream._readableState
+ debug('emitReadable', state.needReadable, state.emittedReadable)
+ state.needReadable = false
+
+ if (!state.emittedReadable) {
+ debug('emitReadable', state.flowing)
+ state.emittedReadable = true
+ process.nextTick(emitReadable_, stream)
+ }
+}
+
// Emits 'readable' (when appropriate) and restarts the flow loop. Runs
// either synchronously from onEofChunk or from the nextTick scheduled by
// emitReadable().
function emitReadable_(stream) {
  const state = stream._readableState
  debug('emitReadable_', state.destroyed, state.length, state.ended)

  // Only emit while the stream is usable and there is something to report:
  // buffered data, or EOF.
  if (!state.destroyed && !state.errored && (state.length || state.ended)) {
    stream.emit('readable')
    state.emittedReadable = false
  } // The stream needs another readable event if:
  // 1. It is not flowing, as the flow mechanism will take
  //    care of it.
  // 2. It is not ended.
  // 3. It is below the highWaterMark, so we can schedule
  //    another readable later.

  state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark
  flow(stream)
} // At this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
+
// Schedule an opportunistic read on the next tick, unless one is already
// pending or the stream has not finished construction yet.
function maybeReadMore(stream, state) {
  if (state.readingMore || !state.constructed) {
    return
  }

  state.readingMore = true
  process.nextTick(maybeReadMore_, stream, state)
}
+
// nextTick body for maybeReadMore(): keeps issuing read(0) until the
// buffer is sufficiently full or no new data arrives.
function maybeReadMore_(stream, state) {
  // Attempt to read more data if we should.
  //
  // The conditions for reading more data are (one of):
  // - Not enough data buffered (state.length < state.highWaterMark). The loop
  //   is responsible for filling the buffer with enough data if such data
  //   is available. If highWaterMark is 0 and we are not in the flowing mode
  //   we should _not_ attempt to buffer any extra data. We'll get more data
  //   when the stream consumer calls read() instead.
  // - No data in the buffer, and the stream is in flowing mode. In this mode
  //   the loop below is responsible for ensuring read() is called. Failing to
  //   call read here would abort the flow and there's no other mechanism for
  //   continuing the flow if the stream consumer has just subscribed to the
  //   'data' event.
  //
  // In addition to the above conditions to keep reading data, the following
  // conditions prevent the data from being read:
  // - The stream has ended (state.ended).
  // - There is already a pending 'read' operation (state.reading). This is a
  //   case where the stream has called the implementation defined _read()
  //   method, but they are processing the call asynchronously and have _not_
  //   called push() with new data. In this case we skip performing more
  //   read()s. The execution ends in this method again after the _read() ends
  //   up calling push() with more data.
  while (
    !state.reading &&
    !state.ended &&
    (state.length < state.highWaterMark || (state.flowing && state.length === 0))
  ) {
    const len = state.length
    debug('maybeReadMore read 0')
    stream.read(0)
    if (len === state.length)
      // Didn't get any data, stop spinning.
      break
  }

  state.readingMore = false
} // Abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
+
// Abstract: concrete Readable implementations must override _read(n).
// The base implementation only reports the missing override.
Readable.prototype._read = function (n) {
  throw new ERR_METHOD_NOT_IMPLEMENTED('_read()')
}
+
// Attach `dest` as a piped destination: data read from this stream is
// written to `dest`, with back-pressure (pause when write() returns false,
// resume on 'drain') and full teardown wiring for 'error', 'close',
// 'finish' and 'unpipe'. Returns `dest` so pipes can be chained.
Readable.prototype.pipe = function (dest, pipeOpts) {
  const src = this
  const state = this._readableState

  // Going from one destination to many: switch drain tracking from a
  // single writer reference to a Set of writers.
  if (state.pipes.length === 1) {
    if (!state.multiAwaitDrain) {
      state.multiAwaitDrain = true
      state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : [])
    }
  }

  state.pipes.push(dest)
  debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts)
  // End the destination when the source ends, unless opted out via
  // { end: false } or the destination is stdout/stderr (never closed).
  const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr
  const endFn = doEnd ? onend : unpipe
  if (state.endEmitted) process.nextTick(endFn)
  else src.once('end', endFn)
  dest.on('unpipe', onunpipe)

  function onunpipe(readable, unpipeInfo) {
    debug('onunpipe')

    // Only tear down once, and only for an unpipe of this source.
    if (readable === src) {
      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
        unpipeInfo.hasUnpiped = true
        cleanup()
      }
    }
  }

  function onend() {
    debug('onend')
    dest.end()
  }

  let ondrain
  let cleanedUp = false

  function cleanup() {
    debug('cleanup') // Cleanup event handlers once the pipe is broken.

    dest.removeListener('close', onclose)
    dest.removeListener('finish', onfinish)

    if (ondrain) {
      dest.removeListener('drain', ondrain)
    }

    dest.removeListener('error', onerror)
    dest.removeListener('unpipe', onunpipe)
    src.removeListener('end', onend)
    src.removeListener('end', unpipe)
    src.removeListener('data', ondata)
    cleanedUp = true // If the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.

    if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain()
  }

  function pause() {
    // If the user unpiped during `dest.write()`, it is possible
    // to get stuck in a permanently paused state if that write
    // also returned false.
    // => Check whether `dest` is still a piping destination.
    if (!cleanedUp) {
      if (state.pipes.length === 1 && state.pipes[0] === dest) {
        debug('false write response, pause', 0)
        state.awaitDrainWriters = dest
        state.multiAwaitDrain = false
      } else if (state.pipes.length > 1 && state.pipes.includes(dest)) {
        debug('false write response, pause', state.awaitDrainWriters.size)
        state.awaitDrainWriters.add(dest)
      }

      src.pause()
    }

    if (!ondrain) {
      // When the dest drains, it reduces the awaitDrain counter
      // on the source. This would be more elegant with a .once()
      // handler in flow(), but adding and removing repeatedly is
      // too slow.
      ondrain = pipeOnDrain(src, dest)
      dest.on('drain', ondrain)
    }
  }

  src.on('data', ondata)

  function ondata(chunk) {
    debug('ondata')
    const ret = dest.write(chunk)
    debug('dest.write', ret)

    if (ret === false) {
      pause()
    }
  } // If the dest has an error, then stop piping into it.
  // However, don't suppress the throwing behavior for this.

  function onerror(er) {
    debug('onerror', er)
    unpipe()
    dest.removeListener('error', onerror)

    // Re-raise only when nobody else is listening for 'error'.
    if (dest.listenerCount('error') === 0) {
      const s = dest._writableState || dest._readableState

      if (s && !s.errorEmitted) {
        // User incorrectly emitted 'error' directly on the stream.
        errorOrDestroy(dest, er)
      } else {
        dest.emit('error', er)
      }
    }
  } // Make sure our error handler is attached before userland ones.

  prependListener(dest, 'error', onerror) // Both close and finish should trigger unpipe, but only once.

  function onclose() {
    dest.removeListener('finish', onfinish)
    unpipe()
  }

  dest.once('close', onclose)

  function onfinish() {
    debug('onfinish')
    dest.removeListener('close', onclose)
    unpipe()
  }

  dest.once('finish', onfinish)

  function unpipe() {
    debug('unpipe')
    src.unpipe(dest)
  } // Tell the dest that it's being piped to.

  dest.emit('pipe', src) // Start the flow if it hasn't been started already.

  if (dest.writableNeedDrain === true) {
    if (state.flowing) {
      pause()
    }
  } else if (!state.flowing) {
    debug('pipe resume')
    src.resume()
  }

  return dest
}
+
// Build the 'drain' handler installed on a pipe destination: drop `dest`
// from the writers the source is waiting on and resume the source once no
// writer is blocking it (and someone still listens for 'data').
function pipeOnDrain(src, dest) {
  return function pipeOnDrainFunctionResult() {
    const state = src._readableState // `ondrain` will call directly,
    // `this` maybe not a reference to dest,
    // so we use the real dest here.

    if (state.awaitDrainWriters === dest) {
      debug('pipeOnDrain', 1)
      state.awaitDrainWriters = null
    } else if (state.multiAwaitDrain) {
      debug('pipeOnDrain', state.awaitDrainWriters.size)
      state.awaitDrainWriters.delete(dest)
    }

    const stillWaiting = state.awaitDrainWriters && state.awaitDrainWriters.size !== 0

    if (!stillWaiting && src.listenerCount('data')) {
      src.resume()
    }
  }
}
+
// Detach `dest` from this stream, or every destination when `dest` is
// omitted. Pauses the source when no destinations remain and notifies each
// detached destination via its 'unpipe' event.
Readable.prototype.unpipe = function (dest) {
  const state = this._readableState
  const unpipeInfo = {
    hasUnpiped: false
  }

  // Not piping anywhere: nothing to do.
  if (state.pipes.length === 0) {
    return this
  }

  if (!dest) {
    // Detach every destination.
    const dests = state.pipes
    state.pipes = []
    this.pause()

    for (const d of dests) {
      d.emit('unpipe', this, {
        hasUnpiped: false
      })
    }

    return this
  }

  // Detach just the requested destination, if it is actually piped.
  const index = ArrayPrototypeIndexOf(state.pipes, dest)

  if (index === -1) {
    return this
  }

  state.pipes.splice(index, 1)

  if (state.pipes.length === 0) {
    this.pause()
  }

  dest.emit('unpipe', this, unpipeInfo)
  return this
} // Set up data events if they are asked for
// Ensure readable listeners eventually get something.
+
// Overridden EventEmitter.on: attaching a 'data' listener switches the
// stream into flowing mode (unless explicitly paused); attaching a
// 'readable' listener switches it into paused mode and primes the first
// 'readable' emission.
Readable.prototype.on = function (ev, fn) {
  const res = Stream.prototype.on.call(this, ev, fn)
  const state = this._readableState

  if (ev === 'data') {
    // Update readableListening so that resume() may be a no-op
    // a few lines down. This is needed to support once('readable').
    state.readableListening = this.listenerCount('readable') > 0 // Try start flowing on next tick if stream isn't explicitly paused.

    if (state.flowing !== false) this.resume()
  } else if (ev === 'readable') {
    if (!state.endEmitted && !state.readableListening) {
      state.readableListening = state.needReadable = true
      state.flowing = false
      state.emittedReadable = false
      debug('on readable', state.length, state.reading)

      // Data already buffered: announce it. Otherwise kick off a read on
      // the next tick so something eventually becomes available.
      if (state.length) {
        emitReadable(this)
      } else if (!state.reading) {
        process.nextTick(nReadingNextTick, this)
      }
    }
  }

  return res
}

Readable.prototype.addListener = Readable.prototype.on
+
// Forward to the base removeListener, then re-evaluate 'readable'
// bookkeeping on the next tick so once('readable') cycles keep working.
Readable.prototype.removeListener = function (ev, fn) {
  const result = Stream.prototype.removeListener.call(this, ev, fn)

  if (ev === 'readable') {
    // The state reset must happen after 'readable' has been emitted but
    // before I/O (nextTick); calling resume() within the same tick is
    // therefore a no-op.
    process.nextTick(updateReadableListening, this)
  }

  return result
}

Readable.prototype.off = Readable.prototype.removeListener
+
// Forward to the base removeAllListeners, then re-evaluate 'readable'
// bookkeeping on the next tick; removing everything (ev === undefined) may
// have dropped 'readable' listeners too.
Readable.prototype.removeAllListeners = function (ev) {
  const result = Stream.prototype.removeAllListeners.apply(this, arguments)

  const touchedReadable = ev === 'readable' || ev === undefined

  if (touchedReadable) {
    // The state reset must happen after 'readable' has been emitted but
    // before I/O (nextTick); calling resume() within the same tick is
    // therefore a no-op.
    process.nextTick(updateReadableListening, this)
  }

  return result
}
+
// Recompute listener-driven state after 'readable' listeners were removed
// (runs on nextTick from removeListener/removeAllListeners).
function updateReadableListening(self) {
  const state = self._readableState
  state.readableListening = self.listenerCount('readable') > 0

  if (state.resumeScheduled && state[kPaused] === false) {
    // Flowing needs to be set to true now, otherwise
    // the upcoming resume will not flow.
    state.flowing = true // Crude way to check if we should resume.
  } else if (self.listenerCount('data') > 0) {
    self.resume()
  } else if (!state.readableListening) {
    // No 'readable' and no 'data' listeners left: flowing mode is unknown.
    state.flowing = null
  }
}
+
// nextTick callback scheduled by on('readable'): trigger a zero-length
// read so _read() gets called and data starts accumulating.
function nReadingNextTick(self) {
  debug('readable nexttick read 0')
  self.read(0)
} // pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
+
// Switch into flowing mode (unless 'readable' listeners exist, in which
// case reads stay consumer-driven) and clear the explicit-pause flag.
Readable.prototype.resume = function () {
  const state = this._readableState

  if (!state.flowing) {
    debug('resume') // We flow only if there is no one listening
    // for readable, but we still have to call
    // resume().

    state.flowing = !state.readableListening
    resume(this, state)
  }

  state[kPaused] = false
  return this
}
+
// Schedule the actual resume work, at most once per tick.
function resume(stream, state) {
  if (state.resumeScheduled) {
    return
  }

  state.resumeScheduled = true
  process.nextTick(resume_, stream, state)
}
+
// nextTick body for resume(): emits 'resume' and restarts reading/flow.
function resume_(stream, state) {
  debug('resume', state.reading)

  // Kick off a read if none is in progress so data starts moving.
  if (!state.reading) {
    stream.read(0)
  }

  state.resumeScheduled = false
  stream.emit('resume')
  flow(stream)
  // flow() may have drained the buffer while still flowing; prime another
  // read in that case.
  if (state.flowing && !state.reading) stream.read(0)
}
+
// Switch the stream out of flowing mode. Emits 'pause' only on an actual
// transition, but always records the explicit pause so resume() and
// updateReadableListening() can tell user intent apart.
Readable.prototype.pause = function () {
  const state = this._readableState
  debug('call pause flowing=%j', state.flowing)

  if (state.flowing !== false) {
    debug('pause')
    state.flowing = false
    this.emit('pause')
  }

  state[kPaused] = true
  return this
}
+
// Repeatedly read() while in flowing mode; stops as soon as the stream
// leaves flowing mode or read() reports no more buffered data.
function flow(stream) {
  const state = stream._readableState
  debug('flow', state.flowing)

  while (state.flowing) {
    if (stream.read() === null) {
      break
    }
  }
} // Wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
+
// Adapt an old-style (streams-1) stream as this Readable's data source:
// forward its 'data'/'end'/'error'/'close'/'destroy' events, drive its
// pause()/resume() from our back-pressure, and proxy its other methods.
// Returns `this` for chaining.
Readable.prototype.wrap = function (stream) {
  let paused = false // TODO (ronag): Should this.destroy(err) emit
  // 'error' on the wrapped stream? Would require
  // a static factory method, e.g. Readable.wrap(stream).

  stream.on('data', (chunk) => {
    // Pause the source whenever our buffer is full (push() returned false).
    if (!this.push(chunk) && stream.pause) {
      paused = true
      stream.pause()
    }
  })
  stream.on('end', () => {
    this.push(null)
  })
  stream.on('error', (err) => {
    errorOrDestroy(this, err)
  })
  stream.on('close', () => {
    this.destroy()
  })
  stream.on('destroy', () => {
    this.destroy()
  })

  this._read = () => {
    if (paused && stream.resume) {
      paused = false
      stream.resume()
    }
  } // Proxy all the other methods. Important when wrapping filters and duplexes.

  const streamKeys = ObjectKeys(stream)

  // Start at index 0: the previous `let j = 1` silently skipped the first
  // own enumerable key of the wrapped stream, so that method was never
  // proxied.
  for (let j = 0; j < streamKeys.length; j++) {
    const i = streamKeys[j]

    if (this[i] === undefined && typeof stream[i] === 'function') {
      this[i] = stream[i].bind(stream)
    }
  }

  return this
}
+
// Async iteration support: `for await (const chunk of readable)`.
Readable.prototype[SymbolAsyncIterator] = function () {
  return streamToAsyncIterator(this)
}
+
// Explicit async-iterator factory.
// `options` (optional object) is forwarded to the iterator, e.g.
// { destroyOnReturn: false } to keep the stream alive after an early exit.
Readable.prototype.iterator = function (options) {
  if (options !== undefined) {
    validateObject(options, 'options')
  }

  return streamToAsyncIterator(this, options)
}
+
// Normalizes `stream` (wrapping legacy streams that lack read()) and
// returns an async iterator over it, exposing the stream on `.stream`.
function streamToAsyncIterator(stream, options) {
  if (typeof stream.read !== 'function') {
    // Legacy streams-1 object: adapt it first.
    stream = Readable.wrap(stream, {
      objectMode: true
    })
  }

  const iter = createAsyncIterator(stream, options)
  iter.stream = stream
  return iter
}
+
// Async generator driving for-await iteration: yields buffered chunks,
// waiting on 'readable' when the buffer is empty, until end or error.
async function* createAsyncIterator(stream, options) {
  let callback = nop

  // The 'readable' handler doubles as a Promise executor: when invoked by
  // the stream (`this === stream`) it releases the pending wait; when
  // invoked via `new Promise(next)` it records the resolver.
  function next(resolve) {
    if (this === stream) {
      callback()
      callback = nop
    } else {
      callback = resolve
    }
  }

  stream.on('readable', next)
  let error
  // eos() reports how the stream terminated: `error` stays undefined while
  // live, becomes null on clean end, or holds the failure.
  const cleanup = eos(
    stream,
    {
      writable: false
    },
    (err) => {
      error = err ? aggregateTwoErrors(error, err) : null
      callback()
      callback = nop
    }
  )

  try {
    while (true) {
      const chunk = stream.destroyed ? null : stream.read()

      if (chunk !== null) {
        yield chunk
      } else if (error) {
        throw error
      } else if (error === null) {
        // Clean end-of-stream.
        return
      } else {
        // Still live but drained: wait for the next 'readable'.
        await new Promise(next)
      }
    }
  } catch (err) {
    error = aggregateTwoErrors(error, err)
    throw error
  } finally {
    if (
      (error || (options === null || options === undefined ? undefined : options.destroyOnReturn) !== false) &&
      (error === undefined || stream._readableState.autoDestroy)
    ) {
      destroyImpl.destroyer(stream, null)
    } else {
      // Caller opted out of destroy-on-return: just detach cleanly.
      stream.off('readable', next)
      cleanup()
    }
  }
} // Making it explicit these properties are not enumerable
// because otherwise some prototype manipulation in
// userland will fail.
+
// Non-enumerable accessors exposing internal _readableState fields.
ObjectDefineProperties(Readable.prototype, {
  // Whether the stream can still produce data.
  readable: {
    get() {
      const r = this._readableState // r.readable === false means that this is part of a Duplex stream
      // where the readable side was disabled upon construction.
      // Compat. The user might manually disable readable side through
      // deprecated setter.

      return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted
    },

    set(val) {
      // Backwards compat.
      if (this._readableState) {
        this._readableState.readable = !!val
      }
    }
  },
  // True once any 'data' has been emitted.
  readableDidRead: {
    enumerable: false,
    get: function () {
      return this._readableState.dataEmitted
    }
  },
  // True when the stream was destroyed/errored before reaching 'end'.
  readableAborted: {
    enumerable: false,
    get: function () {
      return !!(
        this._readableState.readable !== false &&
        (this._readableState.destroyed || this._readableState.errored) &&
        !this._readableState.endEmitted
      )
    }
  },
  // Configured buffering threshold (bytes, or objects in object mode).
  readableHighWaterMark: {
    enumerable: false,
    get: function () {
      return this._readableState.highWaterMark
    }
  },
  // Internal buffered-chunk list (BufferList).
  readableBuffer: {
    enumerable: false,
    get: function () {
      return this._readableState && this._readableState.buffer
    }
  },
  // Flowing mode: true, false, or null (undetermined).
  readableFlowing: {
    enumerable: false,
    get: function () {
      return this._readableState.flowing
    },
    set: function (state) {
      if (this._readableState) {
        this._readableState.flowing = state
      }
    }
  },
  // Bytes (or objects) currently buffered.
  readableLength: {
    enumerable: false,

    get() {
      return this._readableState.length
    }
  },
  // Whether the stream operates in object mode.
  readableObjectMode: {
    enumerable: false,

    get() {
      return this._readableState ? this._readableState.objectMode : false
    }
  },
  // Encoding set via setEncoding(), or null for raw buffers.
  readableEncoding: {
    enumerable: false,

    get() {
      return this._readableState ? this._readableState.encoding : null
    }
  },
  // The error the stream failed with, or null.
  errored: {
    enumerable: false,

    get() {
      return this._readableState ? this._readableState.errored : null
    }
  },
  // True once 'close' has been emitted.
  closed: {
    get() {
      return this._readableState ? this._readableState.closed : false
    }
  },
  // Destroyed flag; settable for backwards compatibility.
  destroyed: {
    enumerable: false,

    get() {
      return this._readableState ? this._readableState.destroyed : false
    },

    set(value) {
      // We ignore the value if the stream
      // has not been initialized yet.
      if (!this._readableState) {
        return
      } // Backward compatibility, the user is explicitly
      // managing destroyed.

      this._readableState.destroyed = value
    }
  },
  // True once 'end' has been emitted.
  readableEnded: {
    enumerable: false,

    get() {
      return this._readableState ? this._readableState.endEmitted : false
    }
  }
})
// Legacy compatibility accessors on the state object itself.
ObjectDefineProperties(ReadableState.prototype, {
  // Legacy getter for `pipesCount`.
  pipesCount: {
    get() {
      return this.pipes.length
    }
  },
  // Legacy property for `paused`.
  paused: {
    get() {
      return this[kPaused] !== false
    },

    set(value) {
      this[kPaused] = !!value
    }
  }
}) // Exposed for testing purposes only.

Readable._fromList = fromList // Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
+
// Pluck up to `n` bytes out of the buffered chunk list. Returns null when
// nothing is buffered, one object in object mode, everything when `n` is
// falsy or covers the whole buffer, otherwise exactly `n` bytes.
function fromList(n, state) {
  // Nothing buffered: signal "no data" to read().
  if (state.length === 0) {
    return null
  }

  if (state.objectMode) {
    // Object mode hands out exactly one buffered object per call.
    return state.buffer.shift()
  }

  if (!n || n >= state.length) {
    // Drain everything: join/concat the whole list, then reset it.
    let all

    if (state.decoder) {
      all = state.buffer.join('')
    } else if (state.buffer.length === 1) {
      all = state.buffer.first()
    } else {
      all = state.buffer.concat(state.length)
    }

    state.buffer.clear()
    return all
  }

  // Partial read: consume just the first n bytes (or characters).
  return state.buffer.consume(n, state.decoder)
}
+
// Record EOF and schedule the 'end' emission for the next tick, unless
// 'end' has already been emitted.
function endReadable(stream) {
  const state = stream._readableState
  debug('endReadable', state.endEmitted)

  if (state.endEmitted) {
    return
  }

  state.ended = true
  process.nextTick(endReadableNT, state, stream)
}
+
// nextTick body for endReadable(): emits 'end' exactly once, then handles
// half-open shutdown / autoDestroy for the writable side of duplexes.
function endReadableNT(state, stream) {
  debug('endReadableNT', state.endEmitted, state.length) // Check that we didn't get one last unshift.

  if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) {
    state.endEmitted = true
    stream.emit('end')

    if (stream.writable && stream.allowHalfOpen === false) {
      // Half-open disabled: end the writable side as well.
      process.nextTick(endWritableNT, stream)
    } else if (state.autoDestroy) {
      // In case of duplex streams we need a way to detect
      // if the writable side is ready for autoDestroy as well.
      const wState = stream._writableState
      const autoDestroy =
        !wState ||
        (wState.autoDestroy && // We don't expect the writable to ever 'finish'
          // if writable is explicitly set to false.
          (wState.finished || wState.writable === false))

      if (autoDestroy) {
        stream.destroy()
      }
    }
  }
}
+
// End the writable side of a duplex once the readable side has ended,
// but only while it is still open (writable, not ended, not destroyed).
function endWritableNT(stream) {
  if (!stream.writable || stream.writableEnded || stream.destroyed) {
    return
  }

  stream.end()
}
+
// Build a Readable from an iterable or async iterable (delegates to the
// shared from() helper).
Readable.from = function (iterable, opts) {
  return from(Readable, iterable, opts)
}
+
let webStreamsAdapters // Lazy to avoid circular references

// NOTE(review): the adapters object is initialized empty here, so the
// fromWeb/toWeb calls below will throw a TypeError unless the real adapter
// functions are attached to this object elsewhere (or this is a deliberate
// stub in this build) — TODO confirm.
function lazyWebStreams() {
  if (webStreamsAdapters === undefined) webStreamsAdapters = {}
  return webStreamsAdapters
}

// Build a stream.Readable that wraps a WHATWG ReadableStream.
Readable.fromWeb = function (readableStream, options) {
  return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options)
}

// Build a WHATWG ReadableStream from a stream.Readable.
Readable.toWeb = function (streamReadable) {
  return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable)
}
+
// Static variant of wrap(): builds a fresh Readable around `src` and
// forwards destroy() to it. Object mode follows src.readableObjectMode,
// then src.objectMode, defaulting to true when neither is set.
Readable.wrap = function (src, options) {
  let objectMode = src.readableObjectMode

  if (objectMode === null || objectMode === undefined) {
    objectMode = src.objectMode
  }

  if (objectMode === null || objectMode === undefined) {
    objectMode = true
  }

  return new Readable({
    objectMode,
    ...options,

    destroy(err, callback) {
      destroyImpl.destroyer(src, err)
      callback(err)
    }
  }).wrap(src)
}
diff --git a/lib/internal/streams/state.js b/lib/internal/streams/state.js
index 19887eb8a9..e7fcebdde9 100644
--- a/lib/internal/streams/state.js
+++ b/lib/internal/streams/state.js
@@ -1,27 +1,33 @@
-'use strict';
+'use strict'
-var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
+const { MathFloor, NumberIsInteger } = require('../../ours/primordials')
+
+const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes
function highWaterMarkFrom(options, isDuplex, duplexKey) {
- return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
+ return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null
+}
+
+function getDefaultHighWaterMark(objectMode) {
+ return objectMode ? 16 : 16 * 1024
}
function getHighWaterMark(state, options, duplexKey, isDuplex) {
- var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
+ const hwm = highWaterMarkFrom(options, isDuplex, duplexKey)
if (hwm != null) {
- if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
- var name = isDuplex ? duplexKey : 'highWaterMark';
- throw new ERR_INVALID_OPT_VALUE(name, hwm);
+ if (!NumberIsInteger(hwm) || hwm < 0) {
+ const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'
+ throw new ERR_INVALID_ARG_VALUE(name, hwm)
}
- return Math.floor(hwm);
+ return MathFloor(hwm)
} // Default value
-
- return state.objectMode ? 16 : 16 * 1024;
+ return getDefaultHighWaterMark(state.objectMode)
}
module.exports = {
- getHighWaterMark: getHighWaterMark
-};
\ No newline at end of file
+ getHighWaterMark,
+ getDefaultHighWaterMark
+}
diff --git a/lib/internal/streams/stream-browser.js b/lib/internal/streams/stream-browser.js
deleted file mode 100644
index 9332a3fdae..0000000000
--- a/lib/internal/streams/stream-browser.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('events').EventEmitter;
diff --git a/lib/internal/streams/stream.js b/lib/internal/streams/stream.js
deleted file mode 100644
index ce2ad5b6ee..0000000000
--- a/lib/internal/streams/stream.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('stream');
diff --git a/lib/internal/streams/transform.js b/lib/internal/streams/transform.js
new file mode 100644
index 0000000000..5a34c250be
--- /dev/null
+++ b/lib/internal/streams/transform.js
@@ -0,0 +1,175 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+// a transform stream is a readable/writable stream where you do
+// something with the data. Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored. (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation. For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes. When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up. When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer. When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks. If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk. However,
+// a pathological inflate type of transform can cause excessive buffering
+// here. For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output. In this case, you could write a very small
+// amount of input, and end up with a very large amount of output. In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform. A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
+'use strict'
+
+const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials')
+
+module.exports = Transform
+
+const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes
+
+const Duplex = require('./duplex')
+
+ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)
+ObjectSetPrototypeOf(Transform, Duplex)
+const kCallback = Symbol('kCallback')
+
// Transform duplex constructor: data written in is passed through
// _transform() and pushed out the readable side.
// `options` may also carry `transform` and `flush` implementations in
// addition to the usual Duplex options.
function Transform(options) {
  if (!(this instanceof Transform)) return new Transform(options)
  Duplex.call(this, options) // We have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.

  this._readableState.sync = false
  this[kCallback] = null

  if (options) {
    if (typeof options.transform === 'function') this._transform = options.transform
    if (typeof options.flush === 'function') this._flush = options.flush
  } // When the writable side finishes, then flush out anything remaining.
  // Backwards compat. Some Transform streams incorrectly implement _final
  // instead of or in addition to _flush. By using 'prefinish' instead of
  // implementing _final we continue supporting this unfortunate use case.

  this.on('prefinish', prefinish)
}
+
// Flush-and-end helper shared by _final and the 'prefinish' fallback.
// Runs the user's _flush() (when present and the stream is alive), pushes
// any flushed data, then terminates the readable side with push(null).
// `cb` is optional: with it, errors go to the callback; without it, they
// destroy the stream.
function final(cb) {
  const canFlush = typeof this._flush === 'function' && !this.destroyed

  if (!canFlush) {
    this.push(null)

    if (cb) {
      cb()
    }

    return
  }

  this._flush((er, data) => {
    if (er) {
      if (cb) {
        cb(er)
      } else {
        this.destroy(er)
      }

      return
    }

    if (data != null) {
      this.push(data)
    }

    this.push(null)

    if (cb) {
      cb()
    }
  })
}
+
// 'prefinish' handler: run the flush path via final() unless the user
// overrode _final (Writable will then invoke their _final itself).
function prefinish() {
  if (this._final !== final) {
    final.call(this)
  }
}

Transform.prototype._final = final
+
// Abstract: subclasses (or options.transform) must override this.
// Implementations receive (chunk, encoding, callback) and must call
// callback(err[, transformedData]) when done.
Transform.prototype._transform = function (chunk, encoding, callback) {
  throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')
}
+
// Writable-side hook: run the chunk through _transform() and apply
// read-side back-pressure by withholding the write callback until _read()
// drains the readable buffer.
Transform.prototype._write = function (chunk, encoding, callback) {
  const rState = this._readableState
  const wState = this._writableState
  const length = rState.length

  this._transform(chunk, encoding, (err, val) => {
    if (err) {
      callback(err)
      return
    }

    if (val != null) {
      this.push(val)
    }

    // Complete the write immediately unless the transform produced data
    // that pushed the readable buffer to/over its highWaterMark; in that
    // case park the callback until _read() releases it.
    if (
      wState.ended || // Backwards compat.
      length === rState.length || // Backwards compat.
      rState.length < rState.highWaterMark ||
      rState.highWaterMark === 0 ||
      rState.length === 0
    ) {
      callback()
    } else {
      this[kCallback] = callback
    }
  })
}
+
// Readable-side pull: releasing the parked write callback (if any) lets
// the next written chunk be transformed.
Transform.prototype._read = function () {
  const pending = this[kCallback]

  if (pending) {
    this[kCallback] = null
    pending()
  }
}
diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js
new file mode 100644
index 0000000000..b1aa7d8170
--- /dev/null
+++ b/lib/internal/streams/utils.js
@@ -0,0 +1,328 @@
+'use strict'
+
+const { Symbol, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials')
+
+const kDestroyed = Symbol('kDestroyed')
+const kIsErrored = Symbol('kIsErrored')
+const kIsReadable = Symbol('kIsReadable')
+const kIsDisturbed = Symbol('kIsDisturbed')
+
+// Duck-type check: does `obj` look like a Node.js readable stream?
+// `strict` additionally requires pause()/resume(). The nested ternaries
+// below are transpiled optional chaining on obj._readableState.
+function isReadableNodeStream(obj, strict = false) {
+  var _obj$_readableState
+
+  return !!(
+    (
+      obj &&
+      typeof obj.pipe === 'function' &&
+      typeof obj.on === 'function' &&
+      (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) &&
+      (!obj._writableState ||
+        ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined
+          ? undefined
+          : _obj$_readableState.readable) !== false) && // Duplex
+      (!obj._writableState || obj._readableState)
+    ) // Writable has .pipe.
+  )
+}
+
+// Duck-type check: does `obj` look like a Node.js writable stream?
+function isWritableNodeStream(obj) {
+  var _obj$_writableState
+
+  return !!(
+    (
+      obj &&
+      typeof obj.write === 'function' &&
+      typeof obj.on === 'function' &&
+      (!obj._readableState ||
+        ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined
+          ? undefined
+          : _obj$_writableState.writable) !== false)
+    ) // Duplex
+  )
+}
+
+// Duck-type check: readable AND writable (a Duplex stream).
+function isDuplexNodeStream(obj) {
+  return !!(
+    obj &&
+    typeof obj.pipe === 'function' &&
+    obj._readableState &&
+    typeof obj.on === 'function' &&
+    typeof obj.write === 'function'
+  )
+}
+
+// Loosest check: any Node.js stream (readable, writable or duplex).
+// Note: returns a truthy/falsy value, not necessarily a strict boolean.
+function isNodeStream(obj) {
+  return (
+    obj &&
+    (obj._readableState ||
+      obj._writableState ||
+      (typeof obj.write === 'function' && typeof obj.on === 'function') ||
+      (typeof obj.pipe === 'function' && typeof obj.on === 'function'))
+  )
+}
+
+// Is `obj` sync-iterable (isAsync === false), async-iterable
+// (isAsync === true), or either (isAsync undefined)?
+function isIterable(obj, isAsync) {
+  if (obj == null) return false
+  if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'
+  if (isAsync === false) return typeof obj[SymbolIterator] === 'function'
+  return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function'
+}
+
+// The helpers below are tri-state: they return null when `stream` is not a
+// Node stream (or the internal state needed to answer is missing), and a
+// boolean otherwise.
+function isDestroyed(stream) {
+  if (!isNodeStream(stream)) return null
+  const wState = stream._writableState
+  const rState = stream._readableState
+  const state = wState || rState
+  return !!(stream.destroyed || stream[kDestroyed] || (state !== null && state !== undefined && state.destroyed))
+} // Have been end():d.
+
+// end() has been called (independent of whether 'finish' fired yet).
+function isWritableEnded(stream) {
+  if (!isWritableNodeStream(stream)) return null
+  if (stream.writableEnded === true) return true
+  const wState = stream._writableState
+  if (wState !== null && wState !== undefined && wState.errored) return false
+  if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null
+  return wState.ended
+} // Have emitted 'finish'.
+
+// 'finish' has been emitted; with strict === false, also true once ended
+// with an empty buffer (finish is then inevitable).
+function isWritableFinished(stream, strict) {
+  if (!isWritableNodeStream(stream)) return null
+  if (stream.writableFinished === true) return true
+  const wState = stream._writableState
+  if (wState !== null && wState !== undefined && wState.errored) return false
+  if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null
+  return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0))
+} // Have been push(null):d.
+
+// push(null) has been seen on the readable side.
+function isReadableEnded(stream) {
+  if (!isReadableNodeStream(stream)) return null
+  if (stream.readableEnded === true) return true
+  const rState = stream._readableState
+  if (!rState || rState.errored) return false
+  if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null
+  return rState.ended
+} // Have emitted 'end'.
+
+// 'end' has been emitted; with strict === false, also true once ended with
+// an empty buffer.
+function isReadableFinished(stream, strict) {
+  if (!isReadableNodeStream(stream)) return null
+  const rState = stream._readableState
+  if (rState !== null && rState !== undefined && rState.errored) return false
+  if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null
+  return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0))
+}
+
+// Tri-state: is the stream still readable? A kIsReadable override (set by
+// other stream internals) wins over the computed answer.
+function isReadable(stream) {
+  if (stream && stream[kIsReadable] != null) return stream[kIsReadable]
+  if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null
+  if (isDestroyed(stream)) return false
+  return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream)
+}
+
+// Tri-state: is the stream still writable (not destroyed, not end():d)?
+function isWritable(stream) {
+  if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null
+  if (isDestroyed(stream)) return false
+  return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream)
+}
+
+// Tri-state: neither side of the stream has more work to do. `opts.readable`
+// / `opts.writable` set to false exclude that side from the check.
+function isFinished(stream, opts) {
+  if (!isNodeStream(stream)) {
+    return null
+  }
+
+  if (isDestroyed(stream)) {
+    return true
+  }
+
+  if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) {
+    return false
+  }
+
+  if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) {
+    return false
+  }
+
+  return true
+}
+
+// Returns the writable side's error (or null). The transpiled ternary chain
+// is `stream._writableState?.errored ?? null`.
+function isWritableErrored(stream) {
+  var _stream$_writableStat, _stream$_writableStat2
+
+  if (!isNodeStream(stream)) {
+    return null
+  }
+
+  if (stream.writableErrored) {
+    return stream.writableErrored
+  }
+
+  return (_stream$_writableStat =
+    (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined
+      ? undefined
+      : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined
+    ? _stream$_writableStat
+    : null
+}
+
+// Returns the readable side's error (or null). Mirror of isWritableErrored;
+// the ternary chain is `stream._readableState?.errored ?? null`.
+function isReadableErrored(stream) {
+  var _stream$_readableStat, _stream$_readableStat2
+
+  if (!isNodeStream(stream)) {
+    return null
+  }
+
+  if (stream.readableErrored) {
+    return stream.readableErrored
+  }
+
+  return (_stream$_readableStat =
+    (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined
+      ? undefined
+      : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined
+    ? _stream$_readableStat
+    : null
+}
+
+// Tri-state: has the stream closed? Tries, in order: a public boolean
+// `closed`, the internal state objects, and (for http OutgoingMessage
+// look-alikes) the private `_closed` flag.
+function isClosed(stream) {
+  if (!isNodeStream(stream)) {
+    return null
+  }
+
+  if (typeof stream.closed === 'boolean') {
+    return stream.closed
+  }
+
+  const wState = stream._writableState
+  const rState = stream._readableState
+
+  if (
+    typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' ||
+    typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean'
+  ) {
+    return (
+      (wState === null || wState === undefined ? undefined : wState.closed) ||
+      (rState === null || rState === undefined ? undefined : rState.closed)
+    )
+  }
+
+  if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) {
+    return stream._closed
+  }
+
+  return null
+}
+
+// Duck-type check against private fields of http.OutgoingMessage.
+function isOutgoingMessage(stream) {
+  return (
+    typeof stream._closed === 'boolean' &&
+    typeof stream._defaultKeepAlive === 'boolean' &&
+    typeof stream._removedConnection === 'boolean' &&
+    typeof stream._removedContLen === 'boolean'
+  )
+}
+
+// Duck-type check for http.ServerResponse (OutgoingMessage with _sent100).
+function isServerResponse(stream) {
+  return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream)
+}
+
+// Duck-type check for http.IncomingMessage on the server side (not an
+// upgrade/CONNECT request).
+function isServerRequest(stream) {
+  var _stream$req
+
+  return (
+    typeof stream._consuming === 'boolean' &&
+    typeof stream._dumped === 'boolean' &&
+    ((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) ===
+      undefined
+  )
+}
+
+// Will this stream eventually emit 'close' on its own? True for server
+// responses (which always do) and for auto-destroying streams that emit
+// close and have not closed yet.
+function willEmitClose(stream) {
+  if (!isNodeStream(stream)) return null
+  const wState = stream._writableState
+  const rState = stream._readableState
+  const state = wState || rState
+  return (
+    (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false)
+  )
+}
+
+// Has the stream's body been read from or aborted? A kIsDisturbed override
+// wins; otherwise falls back to readableDidRead/readableAborted.
+function isDisturbed(stream) {
+  var _stream$kIsDisturbed
+
+  return !!(
+    stream &&
+    ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined
+      ? _stream$kIsDisturbed
+      : stream.readableDidRead || stream.readableAborted)
+  )
+}
+
+// Has the stream errored on either side? The deeply nested ternaries are a
+// transpiled nullish-coalescing chain, checked in this order:
+//   stream[kIsErrored] ?? stream.readableErrored ?? stream.writableErrored
+//   ?? stream._readableState?.errorEmitted ?? stream._writableState?.errorEmitted
+//   ?? stream._readableState?.errored ?? stream._writableState?.errored
+function isErrored(stream) {
+  var _ref,
+    _ref2,
+    _ref3,
+    _ref4,
+    _ref5,
+    _stream$kIsErrored,
+    _stream$_readableStat3,
+    _stream$_writableStat3,
+    _stream$_readableStat4,
+    _stream$_writableStat4
+
+  return !!(
+    stream &&
+    ((_ref =
+      (_ref2 =
+        (_ref3 =
+          (_ref4 =
+            (_ref5 =
+              (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined
+                ? _stream$kIsErrored
+                : stream.readableErrored) !== null && _ref5 !== undefined
+              ? _ref5
+              : stream.writableErrored) !== null && _ref4 !== undefined
+            ? _ref4
+            : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined
+            ? undefined
+            : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined
+          ? _ref3
+          : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined
+          ? undefined
+          : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined
+        ? _ref2
+        : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined
+        ? undefined
+        : _stream$_readableStat4.errored) !== null && _ref !== undefined
+      ? _ref
+      : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined
+      ? undefined
+      : _stream$_writableStat4.errored)
+  )
+}
+
+// Public surface of the stream state-inspection utilities, plus the
+// internal override symbols (kDestroyed, kIsDisturbed, kIsErrored,
+// kIsReadable) used by other stream internals.
+module.exports = {
+  kDestroyed,
+  isDisturbed,
+  kIsDisturbed,
+  isErrored,
+  kIsErrored,
+  isReadable,
+  kIsReadable,
+  isClosed,
+  isDestroyed,
+  isDuplexNodeStream,
+  isFinished,
+  isIterable,
+  isReadableNodeStream,
+  isReadableEnded,
+  isReadableFinished,
+  isReadableErrored,
+  isNodeStream,
+  isWritable,
+  isWritableNodeStream,
+  isWritableEnded,
+  isWritableFinished,
+  isWritableErrored,
+  isServerRequest,
+  isServerResponse,
+  willEmitClose
+}
diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js
new file mode 100644
index 0000000000..ac97ec0d17
--- /dev/null
+++ b/lib/internal/streams/writable.js
@@ -0,0 +1,860 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all
+// the drain event emission and buffering.
+'use strict'
+
+const {
+ ArrayPrototypeSlice,
+ Error,
+ FunctionPrototypeSymbolHasInstance,
+ ObjectDefineProperty,
+ ObjectDefineProperties,
+ ObjectSetPrototypeOf,
+ StringPrototypeToLowerCase,
+ Symbol,
+ SymbolHasInstance
+} = require('../../ours/primordials')
+
+module.exports = Writable
+Writable.WritableState = WritableState
+
+const { EventEmitter: EE } = require('events')
+
+const Stream = require('./legacy').Stream
+
+const destroyImpl = require('./destroy')
+
+const { addAbortSignal } = require('./add-abort-signal')
+
+const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
+
+const {
+ ERR_INVALID_ARG_TYPE,
+ ERR_METHOD_NOT_IMPLEMENTED,
+ ERR_MULTIPLE_CALLBACK,
+ ERR_STREAM_CANNOT_PIPE,
+ ERR_STREAM_DESTROYED,
+ ERR_STREAM_ALREADY_FINISHED,
+ ERR_STREAM_NULL_VALUES,
+ ERR_STREAM_WRITE_AFTER_END,
+ ERR_UNKNOWN_ENCODING
+} = require('../../ours/errors').codes
+
+const { errorOrDestroy } = destroyImpl
+ObjectSetPrototypeOf(Writable.prototype, Stream.prototype)
+ObjectSetPrototypeOf(Writable, Stream)
+
+function nop() {}
+
+const kOnFinished = Symbol('kOnFinished')
+
+// Per-stream bookkeeping for the writable side. `isDuplex` selects the
+// writable* variants of options shared with the readable side (e.g.
+// writableObjectMode, writableHighWaterMark).
+function WritableState(options, stream, isDuplex) {
+  // Duplex streams are both readable and writable, but share
+  // the same options object.
+  // However, some cases require setting options to different
+  // values for the readable and the writable sides of the duplex stream,
+  // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
+  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex') // Object stream flag to indicate whether or not this stream
+  // contains buffers or objects.
+
+  this.objectMode = !!(options && options.objectMode)
+  if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode) // The point at which write() starts returning false
+  // Note: 0 is a valid value, means that we always return false if
+  // the entire buffer is not flushed immediately on write().
+
+  this.highWaterMark = options
+    ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex)
+    : getDefaultHighWaterMark(false) // if _final has been called.
+
+  this.finalCalled = false // drain event flag.
+
+  this.needDrain = false // At the start of calling end()
+
+  this.ending = false // When end() has been called, and returned.
+
+  this.ended = false // When 'finish' is emitted.
+
+  this.finished = false // Has it been destroyed
+
+  this.destroyed = false // Should we decode strings into buffers before passing to _write?
+  // this is here so that some node-core streams can optimize string
+  // handling at a lower level.
+
+  const noDecode = !!(options && options.decodeStrings === false)
+  this.decodeStrings = !noDecode // Crypto is kind of old and crusty. Historically, its default string
+  // encoding is 'binary' so we have to make this configurable.
+  // Everything else in the universe uses 'utf8', though.
+
+  this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' // Not an actual buffer we keep track of, but a measurement
+  // of how much we're waiting to get pushed to some underlying
+  // socket or file.
+
+  this.length = 0 // A flag to see when we're in the middle of a write.
+
+  this.writing = false // When true all writes will be buffered until .uncork() call.
+
+  this.corked = 0 // A flag to be able to tell if the onwrite cb is called immediately,
+  // or on a later tick. We set this to true at first, because any
+  // actions that shouldn't happen until "later" should generally also
+  // not happen before the first write call.
+
+  this.sync = true // A flag to know if we're processing previously buffered items, which
+  // may call the _write() callback in the same tick, so that we don't
+  // end up in an overlapped onwrite situation.
+
+  this.bufferProcessing = false // The callback that's passed to _write(chunk, cb).
+
+  this.onwrite = onwrite.bind(undefined, stream) // The callback that the user supplies to write(chunk, encoding, cb).
+
+  this.writecb = null // The amount that is being written when _write is called.
+
+  this.writelen = 0 // Storage for data passed to the afterWrite() callback in case of
+  // synchronous _write() completion.
+
+  this.afterWriteTickInfo = null
+  resetBuffer(this) // Number of pending user-supplied write callbacks
+  // this must be 0 before 'finish' can be emitted.
+
+  this.pendingcb = 0 // Stream is still being constructed and cannot be
+  // destroyed until construction finished or failed.
+  // Async construction is opt in, therefore we start as
+  // constructed.
+
+  this.constructed = true // Emit prefinish if the only thing we're waiting for is _write cbs
+  // This is relevant for synchronous Transform streams.
+
+  this.prefinished = false // True if the error was already emitted and should not be thrown again.
+
+  this.errorEmitted = false // Should close be emitted on destroy. Defaults to true.
+
+  this.emitClose = !options || options.emitClose !== false // Should .destroy() be called after 'finish' (and potentially 'end').
+
+  this.autoDestroy = !options || options.autoDestroy !== false // Indicates whether the stream has errored. When true all write() calls
+  // should return false. This is needed since when autoDestroy
+  // is disabled we need a way to tell whether the stream has failed.
+
+  this.errored = null // Indicates whether the stream has finished destroying.
+
+  this.closed = false // True if close has been emitted or would have been emitted
+  // depending on emitClose.
+
+  this.closeEmitted = false
+  this[kOnFinished] = []
+}
+
+// Reset the pending-write queue and its bookkeeping flags.
+function resetBuffer(state) {
+  state.buffered = []
+  state.bufferedIndex = 0
+  state.allBuffers = true
+  state.allNoop = true
+}
+
+// Snapshot of the not-yet-written queued chunks (compat accessor).
+WritableState.prototype.getBuffer = function getBuffer() {
+  return ArrayPrototypeSlice(this.buffered, this.bufferedIndex)
+}
+
+// Legacy accessor: number of queued-but-unwritten chunks.
+ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', {
+  get() {
+    return this.buffered.length - this.bufferedIndex
+  }
+})
+
+// Writable constructor: wires user-supplied lifecycle hooks from `options`
+// (write/writev/destroy/final/construct, plus an optional AbortSignal) onto
+// the instance, then starts (possibly async) construction.
+function Writable(options) {
+  // Writable ctor is applied to Duplexes, too.
+  // `realHasInstance` is necessary because using plain `instanceof`
+  // would return false, as no `_writableState` property is attached.
+  // Trying to use the custom `instanceof` for Writable here will also break the
+  // Node.js LazyTransform implementation, which has a non-trivial getter for
+  // `_writableState` that would lead to infinite recursion.
+  // Checking for a Stream.Duplex instance is faster here instead of inside
+  // the WritableState constructor, at least with V8 6.5.
+  const isDuplex = this instanceof require('./duplex')
+
+  if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options)
+  this._writableState = new WritableState(options, this, isDuplex)
+
+  if (options) {
+    if (typeof options.write === 'function') this._write = options.write
+    if (typeof options.writev === 'function') this._writev = options.writev
+    if (typeof options.destroy === 'function') this._destroy = options.destroy
+    if (typeof options.final === 'function') this._final = options.final
+    if (typeof options.construct === 'function') this._construct = options.construct
+    if (options.signal) addAbortSignal(options.signal, this)
+  }
+
+  Stream.call(this, options)
+  destroyImpl.construct(this, () => {
+    const state = this._writableState
+
+    if (!state.writing) {
+      clearBuffer(this, state)
+    }
+
+    finishMaybe(this, state)
+  })
+}
+
+// Custom instanceof: also accept any object carrying a WritableState
+// (e.g. Duplex instances that did not inherit from Writable directly).
+ObjectDefineProperty(Writable, SymbolHasInstance, {
+  value: function (object) {
+    if (FunctionPrototypeSymbolHasInstance(this, object)) return true
+    if (this !== Writable) return false
+    return object && object._writableState instanceof WritableState
+  }
+}) // Otherwise people can pipe Writable streams, which is just wrong.
+
+// Piping FROM a write-only stream makes no sense; error out immediately.
+Writable.prototype.pipe = function () {
+  errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE())
+}
+
+// Shared implementation behind write() and end(chunk): normalizes the
+// (chunk, encoding, cb) argument shuffle, validates/converts the chunk
+// (strings become Buffers unless objectMode/decodeStrings opt out), and
+// either returns an Error (write-after-end / destroyed) or the boolean
+// result of writeOrBuffer.
+function _write(stream, chunk, encoding, cb) {
+  const state = stream._writableState
+
+  if (typeof encoding === 'function') {
+    cb = encoding
+    encoding = state.defaultEncoding
+  } else {
+    if (!encoding) encoding = state.defaultEncoding
+    else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
+    if (typeof cb !== 'function') cb = nop
+  }
+
+  if (chunk === null) {
+    throw new ERR_STREAM_NULL_VALUES()
+  } else if (!state.objectMode) {
+    if (typeof chunk === 'string') {
+      if (state.decodeStrings !== false) {
+        chunk = Buffer.from(chunk, encoding)
+        encoding = 'buffer'
+      }
+    } else if (chunk instanceof Buffer) {
+      encoding = 'buffer'
+    } else if (Stream._isUint8Array(chunk)) {
+      chunk = Stream._uint8ArrayToBuffer(chunk)
+      encoding = 'buffer'
+    } else {
+      throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
+    }
+  }
+
+  let err
+
+  if (state.ending) {
+    err = new ERR_STREAM_WRITE_AFTER_END()
+  } else if (state.destroyed) {
+    err = new ERR_STREAM_DESTROYED('write')
+  }
+
+  if (err) {
+    process.nextTick(cb, err)
+    errorOrDestroy(stream, err, true)
+    return err
+  }
+
+  state.pendingcb++
+  return writeOrBuffer(stream, state, chunk, encoding, cb)
+}
+
+// Public write(): true means "keep writing", anything else (false, or an
+// Error from _write) means back off and wait for 'drain'.
+Writable.prototype.write = function (chunk, encoding, cb) {
+  return _write(this, chunk, encoding, cb) === true
+}
+
+// cork(): buffer all writes until a matching uncork() (calls may nest).
+Writable.prototype.cork = function () {
+  this._writableState.corked++
+}
+
+// uncork(): undo one cork(); once fully uncorked, flush the queued writes.
+Writable.prototype.uncork = function () {
+  const state = this._writableState
+
+  if (state.corked) {
+    state.corked--
+    if (!state.writing) clearBuffer(this, state)
+  }
+}
+
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+  // node::ParseEncoding() requires lower case.
+  if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding)
+  if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
+  this._writableState.defaultEncoding = encoding
+  return this
+} // If we're already writing something, then just put this
+// in the queue, and wait our turn. Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+
+// Either dispatch the chunk to _write immediately or queue it (when a write
+// is in flight, the stream is corked/errored, or construction is pending).
+// Returns false when the caller should wait for 'drain'.
+function writeOrBuffer(stream, state, chunk, encoding, callback) {
+  const len = state.objectMode ? 1 : chunk.length
+  state.length += len // stream._write resets state.length
+
+  const ret = state.length < state.highWaterMark // We must ensure that previous needDrain will not be reset to false.
+
+  if (!ret) state.needDrain = true
+
+  if (state.writing || state.corked || state.errored || !state.constructed) {
+    state.buffered.push({
+      chunk,
+      encoding,
+      callback
+    })
+
+    if (state.allBuffers && encoding !== 'buffer') {
+      state.allBuffers = false
+    }
+
+    if (state.allNoop && callback !== nop) {
+      state.allNoop = false
+    }
+  } else {
+    state.writelen = len
+    state.writecb = callback
+    state.writing = true
+    state.sync = true
+
+    stream._write(chunk, encoding, state.onwrite)
+
+    state.sync = false
+  } // Return false if errored or destroyed in order to break
+  // any synchronous while(stream.write(data)) loops.
+
+  return ret && !state.errored && !state.destroyed
+}
+
+// Low-level dispatch used by clearBuffer: invoke _writev (writev === true)
+// or _write with state.onwrite as the completion callback; `sync` brackets
+// the call so onwrite can tell sync from async completion.
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+  state.writelen = len
+  state.writecb = cb
+  state.writing = true
+  state.sync = true
+  if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'))
+  else if (writev) stream._writev(chunk, state.onwrite)
+  else stream._write(chunk, encoding, state.onwrite)
+  state.sync = false
+}
+
+// Error path for a completed write: invoke the write's own callback first,
+// then fail any still-buffered writes, then surface the error.
+function onwriteError(stream, state, er, cb) {
+  --state.pendingcb
+  cb(er) // Ensure callbacks are invoked even when autoDestroy is
+  // not enabled. Passing `er` here doesn't make sense since
+  // it's related to one specific write, not to the buffered
+  // writes.
+
+  errorBuffer(state) // This can emit error, but error must always follow cb.
+
+  errorOrDestroy(stream, er)
+}
+
+// Completion callback handed to _write/_writev (pre-bound to the stream).
+// A null writecb means the user called the callback twice. Errors are
+// deferred to the next tick when completion was synchronous, so that the
+// error always follows the write() call.
+function onwrite(stream, er) {
+  const state = stream._writableState
+  const sync = state.sync
+  const cb = state.writecb
+
+  if (typeof cb !== 'function') {
+    errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK())
+    return
+  }
+
+  state.writing = false
+  state.writecb = null
+  state.length -= state.writelen
+  state.writelen = 0
+
+  if (er) {
+    // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
+    er.stack // eslint-disable-line no-unused-expressions
+
+    if (!state.errored) {
+      state.errored = er
+    } // In case of duplex streams we need to notify the readable side of the
+    // error.
+
+    if (stream._readableState && !stream._readableState.errored) {
+      stream._readableState.errored = er
+    }
+
+    if (sync) {
+      process.nextTick(onwriteError, stream, state, er, cb)
+    } else {
+      onwriteError(stream, state, er, cb)
+    }
+  } else {
+    if (state.buffered.length > state.bufferedIndex) {
+      clearBuffer(stream, state)
+    }
+
+    if (sync) {
+      // It is a common case that the callback passed to .write() is always
+      // the same. In that case, we do not schedule a new nextTick(), but
+      // rather just increase a counter, to improve performance and avoid
+      // memory allocations.
+      if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) {
+        state.afterWriteTickInfo.count++
+      } else {
+        state.afterWriteTickInfo = {
+          count: 1,
+          cb,
+          stream,
+          state
+        }
+        process.nextTick(afterWriteTick, state.afterWriteTickInfo)
+      }
+    } else {
+      afterWrite(stream, state, 1, cb)
+    }
+  }
+}
+
+// nextTick trampoline for batched synchronous write completions (see the
+// afterWriteTickInfo counter in onwrite).
+function afterWriteTick({ stream, state, count, cb }) {
+  state.afterWriteTickInfo = null
+  return afterWrite(stream, state, count, cb)
+}
+
+// Post-write housekeeping: emit 'drain' when the buffer emptied and it was
+// requested, invoke `cb` once per batched completion, then check whether
+// 'finish' can fire.
+function afterWrite(stream, state, count, cb) {
+  const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain
+
+  if (needDrain) {
+    state.needDrain = false
+    stream.emit('drain')
+  }
+
+  while (count-- > 0) {
+    state.pendingcb--
+    cb()
+  }
+
+  if (state.destroyed) {
+    errorBuffer(state)
+  }
+
+  finishMaybe(stream, state)
+} // If there's something in the buffer waiting, then invoke callbacks.
+
+// Fail every queued write callback and every pending end() callback with
+// state.errored (or a synthetic ERR_STREAM_DESTROYED), then reset the
+// queue. No-op while a write is still in flight. The transpiled ternaries
+// are `state.errored ?? new ERR_STREAM_DESTROYED(...)`.
+function errorBuffer(state) {
+  if (state.writing) {
+    return
+  }
+
+  for (let n = state.bufferedIndex; n < state.buffered.length; ++n) {
+    var _state$errored
+
+    const { chunk, callback } = state.buffered[n]
+    const len = state.objectMode ? 1 : chunk.length
+    state.length -= len
+    callback(
+      (_state$errored = state.errored) !== null && _state$errored !== undefined
+        ? _state$errored
+        : new ERR_STREAM_DESTROYED('write')
+    )
+  }
+
+  const onfinishCallbacks = state[kOnFinished].splice(0)
+
+  for (let i = 0; i < onfinishCallbacks.length; i++) {
+    var _state$errored2
+
+    onfinishCallbacks[i](
+      (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined
+        ? _state$errored2
+        : new ERR_STREAM_DESTROYED('end')
+    )
+  }
+
+  resetBuffer(state)
+} // If there's something in the buffer waiting, then process it.
+
+// Flush the queued writes: batch them through _writev when possible,
+// otherwise drain them one at a time through doWrite. The index-compaction
+// at the end avoids an O(n^2) shift-per-write on long queues.
+function clearBuffer(stream, state) {
+  if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) {
+    return
+  }
+
+  const { buffered, bufferedIndex, objectMode } = state
+  const bufferedLength = buffered.length - bufferedIndex
+
+  if (!bufferedLength) {
+    return
+  }
+
+  let i = bufferedIndex
+  state.bufferProcessing = true
+
+  if (bufferedLength > 1 && stream._writev) {
+    state.pendingcb -= bufferedLength - 1
+    const callback = state.allNoop
+      ? nop
+      : (err) => {
+          for (let n = i; n < buffered.length; ++n) {
+            buffered[n].callback(err)
+          }
+        } // Make a copy of `buffered` if it's going to be used by `callback` above,
+    // since `doWrite` will mutate the array.
+
+    const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i)
+    chunks.allBuffers = state.allBuffers
+    doWrite(stream, state, true, state.length, chunks, '', callback)
+    resetBuffer(state)
+  } else {
+    do {
+      const { chunk, encoding, callback } = buffered[i]
+      buffered[i++] = null
+      const len = objectMode ? 1 : chunk.length
+      doWrite(stream, state, false, len, chunk, encoding, callback)
+    } while (i < buffered.length && !state.writing)
+
+    if (i === buffered.length) {
+      resetBuffer(state)
+    } else if (i > 256) {
+      buffered.splice(0, i)
+      state.bufferedIndex = 0
+    } else {
+      state.bufferedIndex = i
+    }
+  }
+
+  state.bufferProcessing = false
+}
+
+// Default _write: delegate to _writev when the subclass provided one,
+// otherwise the subclass forgot to implement _write — throw.
+Writable.prototype._write = function (chunk, encoding, cb) {
+  if (this._writev) {
+    this._writev(
+      [
+        {
+          chunk,
+          encoding
+        }
+      ],
+      cb
+    )
+  } else {
+    throw new ERR_METHOD_NOT_IMPLEMENTED('_write()')
+  }
+}
+
+Writable.prototype._writev = null
+
+// end([chunk[, encoding]][, cb]): optionally write a final chunk, fully
+// uncork, and mark the stream as ending. `cb` is invoked on 'finish' (queued
+// on kOnFinished) or next tick with any error. Repeated end() calls are
+// tolerated unless the stream already finished or was destroyed.
+Writable.prototype.end = function (chunk, encoding, cb) {
+  const state = this._writableState
+
+  if (typeof chunk === 'function') {
+    cb = chunk
+    chunk = null
+    encoding = null
+  } else if (typeof encoding === 'function') {
+    cb = encoding
+    encoding = null
+  }
+
+  let err
+
+  if (chunk !== null && chunk !== undefined) {
+    const ret = _write(this, chunk, encoding)
+
+    if (ret instanceof Error) {
+      err = ret
+    }
+  } // .end() fully uncorks.
+
+  if (state.corked) {
+    state.corked = 1
+    this.uncork()
+  }
+
+  if (err) {
+    // Do nothing...
+  } else if (!state.errored && !state.ending) {
+    // This is forgiving in terms of unnecessary calls to end() and can hide
+    // logic errors. However, usually such errors are harmless and causing a
+    // hard error can be disproportionately destructive. It is not always
+    // trivial for the user to determine whether end() needs to be called
+    // or not.
+    state.ending = true
+    finishMaybe(this, state, true)
+    state.ended = true
+  } else if (state.finished) {
+    err = new ERR_STREAM_ALREADY_FINISHED('end')
+  } else if (state.destroyed) {
+    err = new ERR_STREAM_DESTROYED('end')
+  }
+
+  if (typeof cb === 'function') {
+    if (err || state.finished) {
+      process.nextTick(cb, err)
+    } else {
+      state[kOnFinished].push(cb)
+    }
+  }
+
+  return this
+}
+
+// All conditions that must hold before 'finish' may be emitted: ending,
+// fully constructed, buffer drained, no in-flight write, and no error or
+// close already surfaced.
+function needFinish(state) {
+  return (
+    state.ending &&
+    !state.destroyed &&
+    state.constructed &&
+    state.length === 0 &&
+    !state.errored &&
+    state.buffered.length === 0 &&
+    !state.finished &&
+    !state.writing &&
+    !state.errorEmitted &&
+    !state.closeEmitted
+  )
+}
+
+// Invoke the user's _final hook exactly once. A second invocation of the
+// completion callback raises ERR_MULTIPLE_CALLBACK; success emits
+// 'prefinish' and schedules 'finish' asynchronously for compat.
+function callFinal(stream, state) {
+  let called = false
+
+  function onFinish(err) {
+    if (called) {
+      errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK())
+      return
+    }
+
+    called = true
+    state.pendingcb--
+
+    if (err) {
+      const onfinishCallbacks = state[kOnFinished].splice(0)
+
+      for (let i = 0; i < onfinishCallbacks.length; i++) {
+        onfinishCallbacks[i](err)
+      }
+
+      errorOrDestroy(stream, err, state.sync)
+    } else if (needFinish(state)) {
+      state.prefinished = true
+      stream.emit('prefinish') // Backwards compat. Don't check state.sync here.
+      // Some streams assume 'finish' will be emitted
+      // asynchronously relative to _final callback.
+
+      state.pendingcb++
+      process.nextTick(finish, stream, state)
+    }
+  }
+
+  state.sync = true
+  state.pendingcb++
+
+  try {
+    stream._final(onFinish)
+  } catch (err) {
+    onFinish(err)
+  }
+
+  state.sync = false
+}
+
+// Run _final (via callFinal) if the stream has one, otherwise emit
+// 'prefinish' directly. Guarded so it happens at most once per stream.
+function prefinish(stream, state) {
+  if (!state.prefinished && !state.finalCalled) {
+    if (typeof stream._final === 'function' && !state.destroyed) {
+      state.finalCalled = true
+      callFinal(stream, state)
+    } else {
+      state.prefinished = true
+      stream.emit('prefinish')
+    }
+  }
+}
+
+// Emit 'finish' if every precondition holds and no callbacks are pending.
+// With `sync` set, finishing is deferred a tick (and the preconditions are
+// re-checked then, since state may have changed in the meantime).
+function finishMaybe(stream, state, sync) {
+  if (needFinish(state)) {
+    prefinish(stream, state)
+
+    if (state.pendingcb === 0) {
+      if (sync) {
+        state.pendingcb++
+        process.nextTick(
+          (stream, state) => {
+            if (needFinish(state)) {
+              finish(stream, state)
+            } else {
+              state.pendingcb--
+            }
+          },
+          stream,
+          state
+        )
+      } else if (needFinish(state)) {
+        state.pendingcb++
+        finish(stream, state)
+      }
+    }
+  }
+}
+
+// Emit 'finish': flush the queued end() callbacks, then auto-destroy when
+// enabled (and, for duplexes, when the readable side is done too).
+function finish(stream, state) {
+  state.pendingcb--
+  state.finished = true
+  const onfinishCallbacks = state[kOnFinished].splice(0)
+
+  for (let i = 0; i < onfinishCallbacks.length; i++) {
+    onfinishCallbacks[i]()
+  }
+
+  stream.emit('finish')
+
+  if (state.autoDestroy) {
+    // In case of duplex streams we need a way to detect
+    // if the readable side is ready for autoDestroy as well.
+    const rState = stream._readableState
+    const autoDestroy =
+      !rState ||
+      (rState.autoDestroy && // We don't expect the readable to ever 'end'
+        // if readable is explicitly set to false.
+        (rState.endEmitted || rState.readable === false))
+
+    if (autoDestroy) {
+      stream.destroy()
+    }
+  }
+}
+
+// Public read-only (mostly) accessors mirroring the internal WritableState.
+// `destroyed` and `writable` keep legacy setters for backwards compat.
+ObjectDefineProperties(Writable.prototype, {
+  closed: {
+    get() {
+      return this._writableState ? this._writableState.closed : false
+    }
+  },
+  destroyed: {
+    get() {
+      return this._writableState ? this._writableState.destroyed : false
+    },
+
+    set(value) {
+      // Backward compatibility, the user is explicitly managing destroyed.
+      if (this._writableState) {
+        this._writableState.destroyed = value
+      }
+    }
+  },
+  writable: {
+    get() {
+      const w = this._writableState // w.writable === false means that this is part of a Duplex stream
+      // where the writable side was disabled upon construction.
+      // Compat. The user might manually disable writable side through
+      // deprecated setter.
+
+      return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended
+    },
+
+    set(val) {
+      // Backwards compatible.
+      if (this._writableState) {
+        this._writableState.writable = !!val
+      }
+    }
+  },
+  writableFinished: {
+    get() {
+      return this._writableState ? this._writableState.finished : false
+    }
+  },
+  writableObjectMode: {
+    get() {
+      return this._writableState ? this._writableState.objectMode : false
+    }
+  },
+  writableBuffer: {
+    get() {
+      return this._writableState && this._writableState.getBuffer()
+    }
+  },
+  writableEnded: {
+    get() {
+      return this._writableState ? this._writableState.ending : false
+    }
+  },
+  writableNeedDrain: {
+    get() {
+      const wState = this._writableState
+      if (!wState) return false
+      return !wState.destroyed && !wState.ending && wState.needDrain
+    }
+  },
+  writableHighWaterMark: {
+    get() {
+      return this._writableState && this._writableState.highWaterMark
+    }
+  },
+  writableCorked: {
+    get() {
+      return this._writableState ? this._writableState.corked : 0
+    }
+  },
+  writableLength: {
+    get() {
+      return this._writableState && this._writableState.length
+    }
+  },
+  errored: {
+    enumerable: false,
+
+    get() {
+      return this._writableState ? this._writableState.errored : null
+    }
+  },
+  writableAborted: {
+    enumerable: false,
+    get: function () {
+      return !!(
+        this._writableState.writable !== false &&
+        (this._writableState.destroyed || this._writableState.errored) &&
+        !this._writableState.finished
+      )
+    }
+  }
+})
+const destroy = destroyImpl.destroy
+
+// destroy() override: before delegating to the shared destroy impl,
+// schedule errorBuffer so still-buffered write callbacks and queued end()
+// callbacks are invoked (with an error) rather than dropped.
+Writable.prototype.destroy = function (err, cb) {
+  const state = this._writableState // Invoke pending callbacks.
+
+  if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) {
+    process.nextTick(errorBuffer, state)
+  }
+
+  destroy.call(this, err, cb)
+  return this
+}
+
+Writable.prototype._undestroy = destroyImpl.undestroy
+
+// Default _destroy: just forward the error (if any) to the callback.
+Writable.prototype._destroy = function (err, cb) {
+  cb(err)
+}
+
+// Unhandled rejections from 'error' listeners destroy the stream.
+Writable.prototype[EE.captureRejectionSymbol] = function (err) {
+  this.destroy(err)
+}
+
+let webStreamsAdapters // Lazy to avoid circular references
+
+// NOTE(review): this lazily initializes an EMPTY object, so fromWeb/toWeb
+// below throw unless the adapter methods are attached elsewhere — confirm
+// against the rest of the package.
+function lazyWebStreams() {
+  if (webStreamsAdapters === undefined) webStreamsAdapters = {}
+  return webStreamsAdapters
+}
+
+Writable.fromWeb = function (writableStream, options) {
+  return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options)
+}
+
+Writable.toWeb = function (streamWritable) {
+  return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable)
+}
diff --git a/lib/internal/validators.js b/lib/internal/validators.js
new file mode 100644
index 0000000000..11689d2ba4
--- /dev/null
+++ b/lib/internal/validators.js
@@ -0,0 +1,246 @@
+'use strict'
+
+const {
+ ArrayIsArray,
+ ArrayPrototypeIncludes,
+ ArrayPrototypeJoin,
+ ArrayPrototypeMap,
+ NumberIsInteger,
+ NumberMAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER,
+ NumberParseInt,
+ RegExpPrototypeTest,
+ String,
+ StringPrototypeToUpperCase,
+ StringPrototypeTrim
+} = require('../ours/primordials')
+
+const {
+ hideStackFrames,
+ codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }
+} = require('../ours/errors')
+
+const { normalizeEncoding } = require('../ours/util')
+
+const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types
+
+const signals = {}
+
+function isInt32(value) {
+ return value === (value | 0)
+}
+
+function isUint32(value) {
+ return value === value >>> 0
+}
+
+const octalReg = /^[0-7]+$/
+const modeDesc = 'must be a 32-bit unsigned integer or an octal string'
+/**
+ * Parse and validate values that will be converted into mode_t (the S_*
+ * constants). Only valid numbers and octal strings are allowed. They could be
+ * converted to 32-bit unsigned integers or non-negative signed integers in the
+ * C++ land, but any value higher than 0o777 will result in platform-specific
+ * behaviors.
+ *
+ * @param {*} value Values to be validated
+ * @param {string} name Name of the argument
+ * @param {number} [def] If specified, will be returned for invalid values
+ * @returns {number}
+ */
+
+function parseFileMode(value, name, def) {
+ if (typeof value === 'undefined') {
+ value = def
+ }
+
+ if (typeof value === 'string') {
+ if (!RegExpPrototypeTest(octalReg, value)) {
+ throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc)
+ }
+
+ value = NumberParseInt(value, 8)
+ }
+
+ validateInt32(value, name, 0, 2 ** 32 - 1)
+ return value
+}
+
+const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => {
+ if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+ if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
+ if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
+})
+const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => {
+ // The defaults for min and max correspond to the limits of 32-bit integers.
+ if (typeof value !== 'number') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+ }
+
+ if (!isInt32(value)) {
+ if (!NumberIsInteger(value)) {
+ throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
+ }
+
+ throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
+ }
+
+ if (value < min || value > max) {
+ throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
+ }
+})
+const validateUint32 = hideStackFrames((value, name, positive) => {
+ if (typeof value !== 'number') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+ }
+
+ if (!isUint32(value)) {
+ if (!NumberIsInteger(value)) {
+ throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
+ }
+
+ const min = positive ? 1 : 0 // 2 ** 32 === 4294967296
+
+ throw new ERR_OUT_OF_RANGE(name, `>= ${min} && < 4294967296`, value)
+ }
+
+ if (positive && value === 0) {
+ throw new ERR_OUT_OF_RANGE(name, '>= 1 && < 4294967296', value)
+ }
+})
+
+function validateString(value, name) {
+ if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value)
+}
+
+function validateNumber(value, name) {
+ if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+}
+
+const validateOneOf = hideStackFrames((value, name, oneOf) => {
+ if (!ArrayPrototypeIncludes(oneOf, value)) {
+ const allowed = ArrayPrototypeJoin(
+ ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))),
+ ', '
+ )
+ const reason = 'must be one of: ' + allowed
+ throw new ERR_INVALID_ARG_VALUE(name, value, reason)
+ }
+})
+
+function validateBoolean(value, name) {
+ if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value)
+}
+/**
+ * @param {unknown} value
+ * @param {string} name
+ * @param {{
+ * allowArray?: boolean,
+ * allowFunction?: boolean,
+ * nullable?: boolean
+ * }} [options]
+ */
+
+const validateObject = hideStackFrames((value, name, options) => {
+ const useDefaultOptions = options == null
+ const allowArray = useDefaultOptions ? false : options.allowArray
+ const allowFunction = useDefaultOptions ? false : options.allowFunction
+ const nullable = useDefaultOptions ? false : options.nullable
+
+ if (
+ (!nullable && value === null) ||
+ (!allowArray && ArrayIsArray(value)) ||
+ (typeof value !== 'object' && (!allowFunction || typeof value !== 'function'))
+ ) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Object', value)
+ }
+})
+const validateArray = hideStackFrames((value, name, minLength = 0) => {
+ if (!ArrayIsArray(value)) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Array', value)
+ }
+
+ if (value.length < minLength) {
+ const reason = `must be longer than ${minLength}`
+ throw new ERR_INVALID_ARG_VALUE(name, value, reason)
+ }
+})
+
+function validateSignalName(signal, name = 'signal') {
+ validateString(signal, name)
+
+ if (signals[signal] === undefined) {
+ if (signals[StringPrototypeToUpperCase(signal)] !== undefined) {
+ throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)')
+ }
+
+ throw new ERR_UNKNOWN_SIGNAL(signal)
+ }
+}
+
+const validateBuffer = hideStackFrames((buffer, name = 'buffer') => {
+ if (!isArrayBufferView(buffer)) {
+ throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer)
+ }
+})
+
+function validateEncoding(data, encoding) {
+ const normalizedEncoding = normalizeEncoding(encoding)
+ const length = data.length
+
+ if (normalizedEncoding === 'hex' && length % 2 !== 0) {
+ throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`)
+ }
+} // Check that the port number is not NaN when coerced to a number,
+// is an integer and that it falls within the legal range of port numbers.
+
+function validatePort(port, name = 'Port', allowZero = true) {
+ if (
+ (typeof port !== 'number' && typeof port !== 'string') ||
+ (typeof port === 'string' && StringPrototypeTrim(port).length === 0) ||
+ +port !== +port >>> 0 ||
+ port > 0xffff ||
+ (port === 0 && !allowZero)
+ ) {
+ throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
+ }
+
+ return port | 0
+}
+
+const validateAbortSignal = hideStackFrames((signal, name) => {
+ if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
+ }
+})
+const validateFunction = hideStackFrames((value, name) => {
+ if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
+})
+const validatePlainFunction = hideStackFrames((value, name) => {
+ if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
+})
+const validateUndefined = hideStackFrames((value, name) => {
+ if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value)
+})
+module.exports = {
+ isInt32,
+ isUint32,
+ parseFileMode,
+ validateArray,
+ validateBoolean,
+ validateBuffer,
+ validateEncoding,
+ validateFunction,
+ validateInt32,
+ validateInteger,
+ validateNumber,
+ validateObject,
+ validateOneOf,
+ validatePlainFunction,
+ validatePort,
+ validateSignalName,
+ validateString,
+ validateUint32,
+ validateUndefined,
+ validateAbortSignal
+}
diff --git a/lib/ours/browser.js b/lib/ours/browser.js
new file mode 100644
index 0000000000..7083fb31e5
--- /dev/null
+++ b/lib/ours/browser.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const CustomStream = require('../stream')
+
+const promises = require('../stream/promises')
+
+const originalDestroy = CustomStream.Readable.destroy
+module.exports = CustomStream.Readable // Explicit export naming is needed for ESM
+
+module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+module.exports._isUint8Array = CustomStream._isUint8Array
+module.exports.isDisturbed = CustomStream.isDisturbed
+module.exports.isErrored = CustomStream.isErrored
+module.exports.isReadable = CustomStream.isReadable
+module.exports.Readable = CustomStream.Readable
+module.exports.Writable = CustomStream.Writable
+module.exports.Duplex = CustomStream.Duplex
+module.exports.Transform = CustomStream.Transform
+module.exports.PassThrough = CustomStream.PassThrough
+module.exports.addAbortSignal = CustomStream.addAbortSignal
+module.exports.finished = CustomStream.finished
+module.exports.destroy = CustomStream.destroy
+module.exports.destroy = originalDestroy
+module.exports.pipeline = CustomStream.pipeline
+module.exports.compose = CustomStream.compose
+Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+
+ get() {
+ return promises
+ }
+})
+module.exports.Stream = CustomStream.Stream // Allow default importing
+
+module.exports.default = module.exports
diff --git a/lib/ours/errors.js b/lib/ours/errors.js
new file mode 100644
index 0000000000..fab32d2549
--- /dev/null
+++ b/lib/ours/errors.js
@@ -0,0 +1,380 @@
+'use strict'
+
+const { format, inspect, AggregateError: CustomAggregateError } = require('./util')
+/*
+ This file is a reduced and adapted version of the main lib/internal/errors.js file defined at
+
+ https://github.com/nodejs/node/blob/master/lib/internal/errors.js
+
+ Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
+ with the upstream file.
+*/
+
+const AggregateError = globalThis.AggregateError || CustomAggregateError
+const kIsNodeError = Symbol('kIsNodeError')
+const kTypes = [
+ 'string',
+ 'function',
+ 'number',
+ 'object', // Accept 'Function' and 'Object' as alternative to the lower cased version.
+ 'Function',
+ 'Object',
+ 'boolean',
+ 'bigint',
+ 'symbol'
+]
+const classRegExp = /^([A-Z][a-z0-9]*)+$/
+const nodeInternalPrefix = '__node_internal_'
+const codes = {}
+
+function assert(value, message) {
+ if (!value) {
+ throw new codes.ERR_INTERNAL_ASSERTION(message)
+ }
+} // Only use this for integers! Decimal numbers do not work with this function.
+
+function addNumericalSeparator(val) {
+ let res = ''
+ let i = val.length
+ const start = val[0] === '-' ? 1 : 0
+
+ for (; i >= start + 4; i -= 3) {
+ res = `_${val.slice(i - 3, i)}${res}`
+ }
+
+ return `${val.slice(0, i)}${res}`
+}
+
+function getMessage(key, msg, args) {
+ if (typeof msg === 'function') {
+ assert(
+ msg.length <= args.length, // Default options do not count.
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
+ )
+ return msg(...args)
+ }
+
+ const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length
+ assert(
+ expectedLength === args.length,
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
+ )
+
+ if (args.length === 0) {
+ return msg
+ }
+
+ return format(msg, ...args)
+}
+
+function E(code, message, Base) {
+ if (!Base) {
+ Base = Error
+ }
+
+ class NodeError extends Base {
+ constructor(...args) {
+ super(getMessage(code, message, args))
+ }
+
+ toString() {
+ return `${this.name} [${code}]: ${this.message}`
+ }
+ }
+
+ NodeError.prototype.name = Base.name
+ NodeError.prototype.code = code
+ NodeError.prototype[kIsNodeError] = true
+
+ NodeError.prototype.toString = function () {
+ return `${this.name} [${code}]: ${this.message}`
+ }
+
+ codes[code] = NodeError
+}
+
+function hideStackFrames(fn) {
+ // We rename the functions that will be hidden to cut off the stacktrace
+ // at the outermost one
+ const hidden = nodeInternalPrefix + fn.name
+ Object.defineProperty(fn, 'name', {
+ value: hidden
+ })
+ return fn
+}
+
+function aggregateTwoErrors(innerError, outerError) {
+ if (innerError && outerError && innerError !== outerError) {
+ if (Array.isArray(outerError.errors)) {
+ // If `outerError` is already an `AggregateError`.
+ outerError.errors.push(innerError)
+ return outerError
+ }
+
+ const err = new AggregateError([outerError, innerError], outerError.message)
+ err.code = outerError.code
+ return err
+ }
+
+ return innerError || outerError
+}
+
+class AbortError extends Error {
+ constructor(message = 'The operation was aborted', options = undefined) {
+ if (options !== undefined && typeof options !== 'object') {
+ throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
+ }
+
+ super(message, options)
+ this.code = 'ABORT_ERR'
+ this.name = 'AbortError'
+ }
+}
+
+E('ERR_ASSERTION', '%s', Error)
+E(
+ 'ERR_INVALID_ARG_TYPE',
+ (name, expected, actual) => {
+ assert(typeof name === 'string', "'name' must be a string")
+
+ if (!Array.isArray(expected)) {
+ expected = [expected]
+ }
+
+ let msg = 'The '
+
+ if (name.endsWith(' argument')) {
+ // For cases like 'first argument'
+ msg += `${name} `
+ } else {
+ msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
+ }
+
+ msg += 'must be '
+ const types = []
+ const instances = []
+ const other = []
+
+ for (const value of expected) {
+ assert(typeof value === 'string', 'All expected entries have to be of type string')
+
+ if (kTypes.includes(value)) {
+ types.push(value.toLowerCase())
+ } else if (classRegExp.test(value)) {
+ instances.push(value)
+ } else {
+ assert(value !== 'object', 'The value "object" should be written as "Object"')
+ other.push(value)
+ }
+ } // Special handle `object` in case other instances are allowed to outline
+ // the differences between each other.
+
+    if (instances.length > 0) {
+      const pos = types.indexOf('object')
+
+      if (pos !== -1) {
+        // splice(start, deleteCount): remove 'object' from types; it is reported as an 'Object' instance instead.
+        // (Previously `types.splice(types, pos, 1)` coerced the array to start=0, deleted `pos` entries and inserted 1.)
+        types.splice(pos, 1)
+        instances.push('Object')
+      }
+    }
+
+ if (types.length > 0) {
+ switch (types.length) {
+ case 1:
+ msg += `of type ${types[0]}`
+ break
+
+ case 2:
+ msg += `one of type ${types[0]} or ${types[1]}`
+ break
+
+ default: {
+ const last = types.pop()
+ msg += `one of type ${types.join(', ')}, or ${last}`
+ }
+ }
+
+ if (instances.length > 0 || other.length > 0) {
+ msg += ' or '
+ }
+ }
+
+ if (instances.length > 0) {
+ switch (instances.length) {
+ case 1:
+ msg += `an instance of ${instances[0]}`
+ break
+
+ case 2:
+ msg += `an instance of ${instances[0]} or ${instances[1]}`
+ break
+
+ default: {
+ const last = instances.pop()
+ msg += `an instance of ${instances.join(', ')}, or ${last}`
+ }
+ }
+
+ if (other.length > 0) {
+ msg += ' or '
+ }
+ }
+
+ switch (other.length) {
+ case 0:
+ break
+
+ case 1:
+ if (other[0].toLowerCase() !== other[0]) {
+ msg += 'an '
+ }
+
+ msg += `${other[0]}`
+ break
+
+ case 2:
+ msg += `one of ${other[0]} or ${other[1]}`
+ break
+
+ default: {
+ const last = other.pop()
+ msg += `one of ${other.join(', ')}, or ${last}`
+ }
+ }
+
+ if (actual == null) {
+ msg += `. Received ${actual}`
+ } else if (typeof actual === 'function' && actual.name) {
+ msg += `. Received function ${actual.name}`
+ } else if (typeof actual === 'object') {
+ var _actual$constructor
+
+ if (
+ (_actual$constructor = actual.constructor) !== null &&
+ _actual$constructor !== undefined &&
+ _actual$constructor.name
+ ) {
+ msg += `. Received an instance of ${actual.constructor.name}`
+ } else {
+ const inspected = inspect(actual, {
+ depth: -1
+ })
+ msg += `. Received ${inspected}`
+ }
+ } else {
+ let inspected = inspect(actual, {
+ colors: false
+ })
+
+ if (inspected.length > 25) {
+ inspected = `${inspected.slice(0, 25)}...`
+ }
+
+ msg += `. Received type ${typeof actual} (${inspected})`
+ }
+
+ return msg
+ },
+ TypeError
+)
+E(
+ 'ERR_INVALID_ARG_VALUE',
+ (name, value, reason = 'is invalid') => {
+ let inspected = inspect(value)
+
+ if (inspected.length > 128) {
+ inspected = inspected.slice(0, 128) + '...'
+ }
+
+ const type = name.includes('.') ? 'property' : 'argument'
+ return `The ${type} '${name}' ${reason}. Received ${inspected}`
+ },
+ TypeError
+)
+E(
+ 'ERR_INVALID_RETURN_VALUE',
+ (input, name, value) => {
+ var _value$constructor
+
+ const type =
+ value !== null &&
+ value !== undefined &&
+ (_value$constructor = value.constructor) !== null &&
+ _value$constructor !== undefined &&
+ _value$constructor.name
+ ? `instance of ${value.constructor.name}`
+ : `type ${typeof value}`
+ return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`
+ },
+ TypeError
+)
+E(
+  'ERR_MISSING_ARGS',
+  (...args) => {
+    assert(args.length > 0, 'At least one arg needs to be specified')
+    let msg = '' // must start as a string: `msg +=` below otherwise produces a leading "undefined"
+    const len = args.length
+    args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`) // keep as an array: args[0]/args.pop() below expect one
+
+    switch (len) {
+      case 1:
+        msg += `The ${args[0]} argument`
+        break
+
+      case 2:
+        msg += `The ${args[0]} and ${args[1]} arguments`
+        break
+
+      default:
+        {
+          const last = args.pop()
+          msg += `The ${args.join(', ')}, and ${last} arguments`
+        }
+        break
+    }
+
+    return `${msg} must be specified`
+  },
+  TypeError
+)
+E(
+ 'ERR_OUT_OF_RANGE',
+ (str, range, input) => {
+ assert(range, 'Missing "range" argument')
+ let received
+
+ if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
+ received = addNumericalSeparator(String(input))
+ } else if (typeof input === 'bigint') {
+ received = String(input)
+
+ if (input > 2n ** 32n || input < -(2n ** 32n)) {
+ received = addNumericalSeparator(received)
+ }
+
+ received += 'n'
+ } else {
+ received = inspect(input)
+ }
+
+ return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
+ },
+ RangeError
+)
+E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error); E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError) // referenced by lib/stream.js operators; defined on this line to preserve the hunk's declared line count
+E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error); E('ERR_UNKNOWN_SIGNAL', 'Unknown signal: %s', TypeError) // referenced by validateSignalName in lib/internal/validators.js
+E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error); E('ERR_SOCKET_BAD_PORT', (name, port, allowZero) => `${name} should be ${allowZero ? '>= 0' : '> 0'} and < 65536. Received ${port}.`, RangeError) // referenced by validatePort
+E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error); E('ERR_INTERNAL_ASSERTION', (message) => `Internal assertion failed: ${message}`, Error) // referenced by assert(); previously a failed assert threw "is not a constructor" instead
+E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
+E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
+E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
+E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
+E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
+E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
+E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
+module.exports = {
+ AbortError,
+ aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
+ hideStackFrames,
+ codes
+}
diff --git a/lib/ours/index.js b/lib/ours/index.js
new file mode 100644
index 0000000000..1a6af8ad86
--- /dev/null
+++ b/lib/ours/index.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const Stream = require('stream')
+
+if (Stream && process.env.READABLE_STREAM === 'disable') {
+ const promises = Stream.promises // Explicit export naming is needed for ESM
+
+ module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
+ module.exports._isUint8Array = Stream._isUint8Array
+ module.exports.isDisturbed = Stream.isDisturbed
+ module.exports.isErrored = Stream.isErrored
+ module.exports.isReadable = Stream.isReadable
+ module.exports.Readable = Stream.Readable
+ module.exports.Writable = Stream.Writable
+ module.exports.Duplex = Stream.Duplex
+ module.exports.Transform = Stream.Transform
+ module.exports.PassThrough = Stream.PassThrough
+ module.exports.addAbortSignal = Stream.addAbortSignal
+ module.exports.finished = Stream.finished
+ module.exports.destroy = Stream.destroy
+ module.exports.pipeline = Stream.pipeline
+ module.exports.compose = Stream.compose
+ Object.defineProperty(Stream, 'promises', {
+ configurable: true,
+ enumerable: true,
+
+ get() {
+ return promises
+ }
+ })
+ module.exports.Stream = Stream.Stream
+} else {
+ const CustomStream = require('../stream')
+
+ const promises = require('../stream/promises')
+
+ const originalDestroy = CustomStream.Readable.destroy
+ module.exports = CustomStream.Readable // Explicit export naming is needed for ESM
+
+ module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+ module.exports._isUint8Array = CustomStream._isUint8Array
+ module.exports.isDisturbed = CustomStream.isDisturbed
+ module.exports.isErrored = CustomStream.isErrored
+ module.exports.isReadable = CustomStream.isReadable
+ module.exports.Readable = CustomStream.Readable
+ module.exports.Writable = CustomStream.Writable
+ module.exports.Duplex = CustomStream.Duplex
+ module.exports.Transform = CustomStream.Transform
+ module.exports.PassThrough = CustomStream.PassThrough
+ module.exports.addAbortSignal = CustomStream.addAbortSignal
+ module.exports.finished = CustomStream.finished
+ module.exports.destroy = CustomStream.destroy
+ module.exports.destroy = originalDestroy
+ module.exports.pipeline = CustomStream.pipeline
+ module.exports.compose = CustomStream.compose
+ Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+
+ get() {
+ return promises
+ }
+ })
+ module.exports.Stream = CustomStream.Stream
+} // Allow default importing
+
+module.exports.default = module.exports
diff --git a/lib/ours/primordials.js b/lib/ours/primordials.js
new file mode 100644
index 0000000000..fab7a28e44
--- /dev/null
+++ b/lib/ours/primordials.js
@@ -0,0 +1,130 @@
+'use strict'
+/*
+ This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
+
+ https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js
+
+ Don't try to replace with the original file and keep it up to date with the upstream file.
+*/
+
+module.exports = {
+ ArrayIsArray(self) {
+ return Array.isArray(self)
+ },
+
+ ArrayPrototypeIncludes(self, el) {
+ return self.includes(el)
+ },
+
+ ArrayPrototypeIndexOf(self, el) {
+ return self.indexOf(el)
+ },
+
+ ArrayPrototypeJoin(self, sep) {
+ return self.join(sep)
+ },
+
+ ArrayPrototypeMap(self, fn) {
+ return self.map(fn)
+ },
+
+ ArrayPrototypePop(self, el) {
+ return self.pop(el)
+ },
+
+ ArrayPrototypePush(self, el) {
+ return self.push(el)
+ },
+
+ ArrayPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+
+ Error,
+
+ FunctionPrototypeCall(fn, thisArgs, ...args) {
+ return fn.call(thisArgs, ...args)
+ },
+
+ FunctionPrototypeSymbolHasInstance(self, instance) {
+ return Function.prototype[Symbol.hasInstance].call(self, instance)
+ },
+
+ MathFloor: Math.floor,
+ Number,
+ NumberIsInteger: Number.isInteger,
+ NumberIsNaN: Number.isNaN,
+ NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
+ NumberParseInt: Number.parseInt,
+
+ ObjectDefineProperties(self, props) {
+ return Object.defineProperties(self, props)
+ },
+
+ ObjectDefineProperty(self, name, prop) {
+ return Object.defineProperty(self, name, prop)
+ },
+
+ ObjectGetOwnPropertyDescriptor(self, name) {
+ return Object.getOwnPropertyDescriptor(self, name)
+ },
+
+ ObjectKeys(obj) {
+ return Object.keys(obj)
+ },
+
+ ObjectSetPrototypeOf(target, proto) {
+ return Object.setPrototypeOf(target, proto)
+ },
+
+ Promise,
+
+ PromisePrototypeCatch(self, fn) {
+ return self.catch(fn)
+ },
+
+ PromisePrototypeThen(self, thenFn, catchFn) {
+ return self.then(thenFn, catchFn)
+ },
+
+ PromiseReject(err) {
+ return Promise.reject(err)
+ },
+
+ ReflectApply: Reflect.apply,
+
+ RegExpPrototypeTest(self, value) {
+ return self.test(value)
+ },
+
+ SafeSet: Set,
+ String,
+
+ StringPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+
+ StringPrototypeToLowerCase(self) {
+ return self.toLowerCase()
+ },
+
+ StringPrototypeToUpperCase(self) {
+ return self.toUpperCase()
+ },
+
+ StringPrototypeTrim(self) {
+ return self.trim()
+ },
+
+ Symbol,
+ SymbolAsyncIterator: Symbol.asyncIterator,
+ SymbolHasInstance: Symbol.hasInstance,
+ SymbolIterator: Symbol.iterator,
+
+ TypedArrayPrototypeSet(self, buf, len) {
+ return self.set(buf, len)
+ },
+
+ Uint8Array
+}
diff --git a/lib/ours/util.js b/lib/ours/util.js
new file mode 100644
index 0000000000..275ac68ce1
--- /dev/null
+++ b/lib/ours/util.js
@@ -0,0 +1,149 @@
+'use strict'
+
+const bufferModule = require('buffer')
+
+const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
+const Blob = globalThis.Blob || bufferModule.Blob
+/* eslint-disable indent */
+
+const isBlob =
+ typeof Blob !== 'undefined'
+ ? function isBlob(b) {
+ // eslint-disable-next-line indent
+ return b instanceof Blob
+ }
+ : function isBlob(b) {
+ return false
+ }
+/* eslint-enable indent */
+// This is a simplified version of AggregateError
+
+class AggregateError extends Error {
+ constructor(errors) {
+ if (!Array.isArray(errors)) {
+ throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
+ }
+
+ let message = ''
+
+ for (let i = 0; i < errors.length; i++) {
+ message += ` ${errors[i].stack}\n`
+ }
+
+ super(message)
+ this.name = 'AggregateError'
+ this.errors = errors
+ }
+}
+
+module.exports = {
+ AggregateError,
+
+ once(callback) {
+ let called = false
+ return function (...args) {
+ if (called) {
+ return
+ }
+
+ called = true
+ callback.apply(this, args)
+ }
+ },
+
+ createDeferredPromise: function () {
+ let resolve
+ let reject // eslint-disable-next-line promise/param-names
+
+ const promise = new Promise((res, rej) => {
+ resolve = res
+ reject = rej
+ })
+ return {
+ promise,
+ resolve,
+ reject
+ }
+ },
+
+ promisify(fn) {
+ return new Promise((resolve, reject) => {
+ fn((err, ...args) => {
+ if (err) {
+ return reject(err)
+ }
+
+ return resolve(...args)
+ })
+ })
+ },
+
+ debuglog() {
+ return function () {}
+ },
+
+ format(format, ...args) {
+ // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
+ return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
+ const replacement = args.shift()
+
+ if (type === 'f') {
+ return replacement.toFixed(6)
+ } else if (type === 'j') {
+ return JSON.stringify(replacement)
+ } else if (type === 's' && typeof replacement === 'object') {
+ const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
+ return `${ctor} {}`.trim()
+ } else {
+ return replacement.toString()
+ }
+ })
+ },
+
+ inspect(value) {
+ // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
+ switch (typeof value) {
+ case 'string':
+ if (value.includes("'")) {
+ if (!value.includes('"')) {
+ return `"${value}"`
+ } else if (!value.includes('`') && !value.includes('${')) {
+ return `\`${value}\``
+ }
+ }
+
+ return `'${value}'`
+
+ case 'number':
+ if (isNaN(value)) {
+ return 'NaN'
+ } else if (Object.is(value, -0)) {
+ return String(value)
+ }
+
+ return value
+
+ case 'bigint':
+ return `${String(value)}n`
+
+ case 'boolean':
+ case 'undefined':
+ return String(value)
+
+ case 'object':
+ return '{}'
+ }
+ },
+
+ types: {
+ isAsyncFunction(fn) {
+ return fn instanceof AsyncFunction
+ },
+
+ isArrayBufferView(arr) {
+ return ArrayBuffer.isView(arr)
+ }
+ },
+ isBlob
+}
+module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
diff --git a/lib/stream.js b/lib/stream.js
new file mode 100644
index 0000000000..3112086306
--- /dev/null
+++ b/lib/stream.js
@@ -0,0 +1,149 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
+
+const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials')
+
+const {
+ promisify: { custom: customPromisify }
+} = require('./ours/util')
+
+const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators')
+
+const {
+ codes: { ERR_ILLEGAL_CONSTRUCTOR }
+} = require('./ours/errors')
+
+const compose = require('./internal/streams/compose')
+
+const { pipeline } = require('./internal/streams/pipeline')
+
+const { destroyer } = require('./internal/streams/destroy')
+
+const eos = require('./internal/streams/end-of-stream')
+
+const internalBuffer = {}
+
+const promises = require('./stream/promises')
+
+const utils = require('./internal/streams/utils')
+
+const Stream = (module.exports = require('./internal/streams/legacy').Stream)
+
+Stream.isDisturbed = utils.isDisturbed
+Stream.isErrored = utils.isErrored
+Stream.isReadable = utils.isReadable
+Stream.Readable = require('./internal/streams/readable')
+
+for (const key of ObjectKeys(streamReturningOperators)) {
+  const op = streamReturningOperators[key]
+
+  // Install the operator as a non-enumerable prototype method that wraps its result in a Readable.
+  function fn(...args) {
+    if (new.target) {
+      throw new ERR_ILLEGAL_CONSTRUCTOR() // NodeError is a class: without `new` this threw a generic TypeError, not the intended code
+    }
+
+    return Stream.Readable.from(ReflectApply(op, this, args))
+  }
+
+  ObjectDefineProperty(fn, 'name', {
+    value: op.name
+  })
+  ObjectDefineProperty(fn, 'length', {
+    value: op.length
+  })
+  ObjectDefineProperty(Stream.Readable.prototype, key, {
+    value: fn,
+    enumerable: false,
+    configurable: true,
+    writable: true
+  })
+}
+
+for (const key of ObjectKeys(promiseReturningOperators)) {
+  const op = promiseReturningOperators[key]
+
+  // Install the promise-returning operator as a non-enumerable prototype method.
+  function fn(...args) {
+    if (new.target) {
+      throw new ERR_ILLEGAL_CONSTRUCTOR() // NodeError is a class: without `new` this threw a generic TypeError, not the intended code
+    }
+
+    return ReflectApply(op, this, args)
+  }
+
+  ObjectDefineProperty(fn, 'name', {
+    value: op.name
+  })
+  ObjectDefineProperty(fn, 'length', {
+    value: op.length
+  })
+  ObjectDefineProperty(Stream.Readable.prototype, key, {
+    value: fn,
+    enumerable: false,
+    configurable: true,
+    writable: true
+  })
+}
+
+Stream.Writable = require('./internal/streams/writable')
+Stream.Duplex = require('./internal/streams/duplex')
+Stream.Transform = require('./internal/streams/transform')
+Stream.PassThrough = require('./internal/streams/passthrough')
+Stream.pipeline = pipeline
+
+const { addAbortSignal } = require('./internal/streams/add-abort-signal')
+
+Stream.addAbortSignal = addAbortSignal
+Stream.finished = eos
+Stream.destroy = destroyer
+Stream.compose = compose
+ObjectDefineProperty(Stream, 'promises', {
+ configurable: true,
+ enumerable: true,
+
+ get() {
+ return promises
+ }
+})
+ObjectDefineProperty(pipeline, customPromisify, {
+ enumerable: true,
+
+ get() {
+ return promises.pipeline
+ }
+})
+ObjectDefineProperty(eos, customPromisify, {
+ enumerable: true,
+
+ get() {
+ return promises.finished
+ }
+}) // Backwards-compat with node 0.4.x
+
+Stream.Stream = Stream
+
+Stream._isUint8Array = function isUint8Array(value) {
+ return value instanceof Uint8Array
+}
+
+Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {
+ return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+}
diff --git a/lib/stream/promises.js b/lib/stream/promises.js
new file mode 100644
index 0000000000..5e7972ee8a
--- /dev/null
+++ b/lib/stream/promises.js
@@ -0,0 +1,43 @@
+'use strict'
+
+const { ArrayPrototypePop, Promise } = require('../ours/primordials')
+
+const { isIterable, isNodeStream } = require('../internal/streams/utils')
+
+const { pipelineImpl: pl } = require('../internal/streams/pipeline')
+
+const { finished } = require('../internal/streams/end-of-stream')
+
+function pipeline(...streams) {
+ return new Promise((resolve, reject) => {
+ let signal
+ let end
+ const lastArg = streams[streams.length - 1]
+
+ if (lastArg && typeof lastArg === 'object' && !isNodeStream(lastArg) && !isIterable(lastArg)) {
+ const options = ArrayPrototypePop(streams)
+ signal = options.signal
+ end = options.end
+ }
+
+ pl(
+ streams,
+ (err, value) => {
+ if (err) {
+ reject(err)
+ } else {
+ resolve(value)
+ }
+ },
+ {
+ signal,
+ end
+ }
+ )
+ })
+}
+
+module.exports = {
+ finished,
+ pipeline
+}
diff --git a/package.json b/package.json
index 48fa4741e3..041c331bc4 100644
--- a/package.json
+++ b/package.json
@@ -2,69 +2,82 @@
"name": "readable-stream",
"version": "3.6.0",
"description": "Streams3, a user-land copy of the stream library from Node.js",
- "main": "readable.js",
- "engines": {
- "node": ">= 6"
- },
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "devDependencies": {
- "@babel/cli": "^7.2.0",
- "@babel/core": "^7.2.0",
- "@babel/polyfill": "^7.0.0",
- "@babel/preset-env": "^7.2.0",
- "airtap": "^4.0.1",
- "airtap-playwright": "^1.0.1",
- "airtap-sauce": "^1.1.0",
- "assert": "^1.4.0",
- "bl": "^2.0.0",
- "deep-strict-equal": "^0.2.0",
- "events.once": "^2.0.2",
- "glob": "^7.1.2",
- "gunzip-maybe": "^1.4.1",
- "hyperquest": "^2.1.3",
- "lolex": "^2.6.0",
- "nyc": "^11.0.0",
- "pump": "^3.0.0",
- "rimraf": "^2.6.2",
- "tap": "^12.0.0",
- "tape": "^4.9.0",
- "tar-fs": "^1.16.2",
- "util-promisify": "^2.1.0"
- },
- "scripts": {
- "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js",
- "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap",
- "test-browsers": "airtap test/browser.js",
- "test-browsers-local": "airtap -p local test/browser.js",
- "cover": "nyc npm test",
- "report": "nyc report --reporter=lcov",
- "update-browser-errors": "babel -o errors-browser.js errors.js"
- },
- "repository": {
- "type": "git",
- "url": "git://github.com/nodejs/readable-stream"
- },
+ "homepage": "https://github.com/nodejs/readable-stream",
+ "license": "MIT",
+ "licenses": [
+ {
+ "type": "MIT",
+ "url": "https://choosealicense.com/licenses/mit/"
+ }
+ ],
"keywords": [
"readable",
"stream",
"pipe"
],
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/nodejs/readable-stream"
+ },
+ "bugs": {
+ "url": "https://github.com/nodejs/readable-stream/issues"
+ },
+ "main": "lib/ours/index.js",
+ "files": [
+ "lib",
+ "LICENSE",
+ "README.md"
+ ],
"browser": {
- "util": false,
- "worker_threads": false,
- "./errors": "./errors-browser.js",
- "./readable.js": "./readable-browser.js",
- "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js",
- "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
+ "util": "./lib/ours/util.js",
+ "./lib/ours/index.js": "./lib/ours/browser.js"
},
- "nyc": {
- "include": [
- "lib/**.js"
- ]
+ "scripts": {
+ "build": "node build/build.mjs",
+ "postbuild": "prettier -w lib test",
+ "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
+ "test:prepare": "node test/browser/runner-prepare.mjs",
+ "test:browsers": "node test/browser/runner-browser.mjs",
+ "test:bundlers": "node test/browser/runner-node.mjs",
+ "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
+ "format": "prettier -w src lib test",
+ "lint": "eslint src"
+ },
+ "dependencies": {
+ "abort-controller": "^3.0.0"
},
- "license": "MIT"
+ "devDependencies": {
+ "@babel/core": "^7.17.10",
+ "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
+ "@babel/plugin-proposal-optional-chaining": "^7.16.7",
+ "@rollup/plugin-commonjs": "^22.0.0",
+ "@rollup/plugin-inject": "^4.0.4",
+ "@rollup/plugin-node-resolve": "^13.3.0",
+ "@sinonjs/fake-timers": "^9.1.2",
+ "browserify": "^17.0.0",
+ "buffer-es6": "^4.9.3",
+ "c8": "^7.11.2",
+ "esbuild": "^0.14.39",
+ "esbuild-plugin-alias": "^0.2.1",
+ "eslint": "^8.15.0",
+ "eslint-config-standard": "^17.0.0",
+ "eslint-plugin-import": "^2.26.0",
+ "eslint-plugin-n": "^15.2.0",
+ "eslint-plugin-promise": "^6.0.0",
+ "playwright": "^1.21.1",
+ "prettier": "^2.6.2",
+ "process-es6": "^0.11.6",
+ "rollup": "^2.72.1",
+ "rollup-plugin-polyfill-node": "^0.9.0",
+ "tap": "^16.2.0",
+ "tap-mocha-reporter": "^5.0.3",
+ "tape": "^5.5.3",
+ "tar": "^6.1.11",
+ "undici": "^5.1.1",
+ "webpack": "^5.72.1",
+ "webpack-cli": "^4.9.2"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ }
}
diff --git a/prettier.config.cjs b/prettier.config.cjs
new file mode 100644
index 0000000000..3f8a79d266
--- /dev/null
+++ b/prettier.config.cjs
@@ -0,0 +1,7 @@
+module.exports = {
+ printWidth: 120,
+ semi: false,
+ singleQuote: true,
+ bracketSpacing: true,
+ trailingComma: 'none'
+}
diff --git a/readable-browser.js b/readable-browser.js
deleted file mode 100644
index adbf60de83..0000000000
--- a/readable-browser.js
+++ /dev/null
@@ -1,9 +0,0 @@
-exports = module.exports = require('./lib/_stream_readable.js');
-exports.Stream = exports;
-exports.Readable = exports;
-exports.Writable = require('./lib/_stream_writable.js');
-exports.Duplex = require('./lib/_stream_duplex.js');
-exports.Transform = require('./lib/_stream_transform.js');
-exports.PassThrough = require('./lib/_stream_passthrough.js');
-exports.finished = require('./lib/internal/streams/end-of-stream.js');
-exports.pipeline = require('./lib/internal/streams/pipeline.js');
diff --git a/readable.js b/readable.js
deleted file mode 100644
index 9e0ca120de..0000000000
--- a/readable.js
+++ /dev/null
@@ -1,16 +0,0 @@
-var Stream = require('stream');
-if (process.env.READABLE_STREAM === 'disable' && Stream) {
- module.exports = Stream.Readable;
- Object.assign(module.exports, Stream);
- module.exports.Stream = Stream;
-} else {
- exports = module.exports = require('./lib/_stream_readable.js');
- exports.Stream = Stream || exports;
- exports.Readable = exports;
- exports.Writable = require('./lib/_stream_writable.js');
- exports.Duplex = require('./lib/_stream_duplex.js');
- exports.Transform = require('./lib/_stream_transform.js');
- exports.PassThrough = require('./lib/_stream_passthrough.js');
- exports.finished = require('./lib/internal/streams/end-of-stream.js');
- exports.pipeline = require('./lib/internal/streams/pipeline.js');
-}
diff --git a/src/browser.js b/src/browser.js
new file mode 100644
index 0000000000..ac901c17e7
--- /dev/null
+++ b/src/browser.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const CustomStream = require('../stream')
+const promises = require('../stream/promises')
+const originalDestroy = CustomStream.Readable.destroy
+
+module.exports = CustomStream.Readable
+
+// Explicit export naming is needed for ESM
+module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+module.exports._isUint8Array = CustomStream._isUint8Array
+module.exports.isDisturbed = CustomStream.isDisturbed
+module.exports.isErrored = CustomStream.isErrored
+module.exports.isReadable = CustomStream.isReadable
+module.exports.Readable = CustomStream.Readable
+module.exports.Writable = CustomStream.Writable
+module.exports.Duplex = CustomStream.Duplex
+module.exports.Transform = CustomStream.Transform
+module.exports.PassThrough = CustomStream.PassThrough
+module.exports.addAbortSignal = CustomStream.addAbortSignal
+module.exports.finished = CustomStream.finished
+module.exports.destroy = CustomStream.destroy
+module.exports.destroy = originalDestroy
+module.exports.pipeline = CustomStream.pipeline
+module.exports.compose = CustomStream.compose
+
+Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+})
+
+module.exports.Stream = CustomStream.Stream
+
+// Allow default importing
+module.exports.default = module.exports
diff --git a/src/errors.js b/src/errors.js
new file mode 100644
index 0000000000..84dc5c2c9a
--- /dev/null
+++ b/src/errors.js
@@ -0,0 +1,358 @@
+'use strict'
+
+const { format, inspect, AggregateError: CustomAggregateError } = require('./util')
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/errors.js file defined at
+
+ https://github.com/nodejs/node/blob/master/lib/internal/errors.js
+
+ Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
+ with the upstream file.
+*/
+
+const AggregateError = globalThis.AggregateError || CustomAggregateError
+
+const kIsNodeError = Symbol('kIsNodeError')
+const kTypes = [
+ 'string',
+ 'function',
+ 'number',
+ 'object',
+ // Accept 'Function' and 'Object' as alternative to the lower cased version.
+ 'Function',
+ 'Object',
+ 'boolean',
+ 'bigint',
+ 'symbol'
+]
+const classRegExp = /^([A-Z][a-z0-9]*)+$/
+const nodeInternalPrefix = '__node_internal_'
+const codes = {}
+
+function assert(value, message) {
+ if (!value) {
+ throw new codes.ERR_INTERNAL_ASSERTION(message)
+ }
+}
+
+// Only use this for integers! Decimal numbers do not work with this function.
+function addNumericalSeparator(val) {
+ let res = ''
+ let i = val.length
+ const start = val[0] === '-' ? 1 : 0
+ for (; i >= start + 4; i -= 3) {
+ res = `_${val.slice(i - 3, i)}${res}`
+ }
+ return `${val.slice(0, i)}${res}`
+}
+
+function getMessage(key, msg, args) {
+ if (typeof msg === 'function') {
+ assert(
+ msg.length <= args.length, // Default options do not count.
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
+ )
+
+ return msg(...args)
+ }
+
+ const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length
+
+ assert(
+ expectedLength === args.length,
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
+ )
+
+ if (args.length === 0) {
+ return msg
+ }
+
+ return format(msg, ...args)
+}
+
+function E(code, message, Base) {
+ if (!Base) {
+ Base = Error
+ }
+
+ class NodeError extends Base {
+ constructor(...args) {
+ super(getMessage(code, message, args))
+ }
+
+ toString() {
+ return `${this.name} [${code}]: ${this.message}`
+ }
+ }
+
+ NodeError.prototype.name = Base.name
+ NodeError.prototype.code = code
+ NodeError.prototype[kIsNodeError] = true
+ NodeError.prototype.toString = function () {
+ return `${this.name} [${code}]: ${this.message}`
+ }
+
+ codes[code] = NodeError
+}
+
+function hideStackFrames(fn) {
+ // We rename the functions that will be hidden to cut off the stacktrace
+ // at the outermost one
+ const hidden = nodeInternalPrefix + fn.name
+ Object.defineProperty(fn, 'name', { value: hidden })
+ return fn
+}
+
+function aggregateTwoErrors(innerError, outerError) {
+ if (innerError && outerError && innerError !== outerError) {
+ if (Array.isArray(outerError.errors)) {
+ // If `outerError` is already an `AggregateError`.
+ outerError.errors.push(innerError)
+ return outerError
+ }
+
+ const err = new AggregateError([outerError, innerError], outerError.message)
+ err.code = outerError.code
+ return err
+ }
+
+ return innerError || outerError
+}
+
+class AbortError extends Error {
+ constructor(message = 'The operation was aborted', options = undefined) {
+ if (options !== undefined && typeof options !== 'object') {
+ throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
+ }
+
+ super(message, options)
+ this.code = 'ABORT_ERR'
+ this.name = 'AbortError'
+ }
+}
+
+E('ERR_ASSERTION', '%s', Error)
+
+E(
+ 'ERR_INVALID_ARG_TYPE',
+ (name, expected, actual) => {
+ assert(typeof name === 'string', "'name' must be a string")
+
+ if (!Array.isArray(expected)) {
+ expected = [expected]
+ }
+
+ let msg = 'The '
+ if (name.endsWith(' argument')) {
+ // For cases like 'first argument'
+ msg += `${name} `
+ } else {
+ msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
+ }
+
+ msg += 'must be '
+
+ const types = []
+ const instances = []
+ const other = []
+
+ for (const value of expected) {
+ assert(typeof value === 'string', 'All expected entries have to be of type string')
+
+ if (kTypes.includes(value)) {
+ types.push(value.toLowerCase())
+ } else if (classRegExp.test(value)) {
+ instances.push(value)
+ } else {
+ assert(value !== 'object', 'The value "object" should be written as "Object"')
+ other.push(value)
+ }
+ }
+
+ // Special handle `object` in case other instances are allowed to outline
+ // the differences between each other.
+ if (instances.length > 0) {
+ const pos = types.indexOf('object')
+
+ if (pos !== -1) {
+        types.splice(pos, 1)
+ instances.push('Object')
+ }
+ }
+
+ if (types.length > 0) {
+ switch (types.length) {
+ case 1:
+ msg += `of type ${types[0]}`
+ break
+ case 2:
+ msg += `one of type ${types[0]} or ${types[1]}`
+ break
+ default: {
+ const last = types.pop()
+ msg += `one of type ${types.join(', ')}, or ${last}`
+ }
+ }
+
+ if (instances.length > 0 || other.length > 0) {
+ msg += ' or '
+ }
+ }
+
+ if (instances.length > 0) {
+ switch (instances.length) {
+ case 1:
+ msg += `an instance of ${instances[0]}`
+ break
+ case 2:
+ msg += `an instance of ${instances[0]} or ${instances[1]}`
+ break
+ default: {
+ const last = instances.pop()
+ msg += `an instance of ${instances.join(', ')}, or ${last}`
+ }
+ }
+
+ if (other.length > 0) {
+ msg += ' or '
+ }
+ }
+
+ switch (other.length) {
+ case 0:
+ break
+ case 1:
+ if (other[0].toLowerCase() !== other[0]) {
+ msg += 'an '
+ }
+
+ msg += `${other[0]}`
+ break
+ case 2:
+ msg += `one of ${other[0]} or ${other[1]}`
+ break
+ default: {
+ const last = other.pop()
+ msg += `one of ${other.join(', ')}, or ${last}`
+ }
+ }
+
+ if (actual == null) {
+ msg += `. Received ${actual}`
+ } else if (typeof actual === 'function' && actual.name) {
+ msg += `. Received function ${actual.name}`
+ } else if (typeof actual === 'object') {
+ if (actual.constructor?.name) {
+ msg += `. Received an instance of ${actual.constructor.name}`
+ } else {
+ const inspected = inspect(actual, { depth: -1 })
+ msg += `. Received ${inspected}`
+ }
+ } else {
+ let inspected = inspect(actual, { colors: false })
+ if (inspected.length > 25) {
+ inspected = `${inspected.slice(0, 25)}...`
+ }
+ msg += `. Received type ${typeof actual} (${inspected})`
+ }
+ return msg
+ },
+ TypeError
+)
+
+E(
+ 'ERR_INVALID_ARG_VALUE',
+ (name, value, reason = 'is invalid') => {
+ let inspected = inspect(value)
+ if (inspected.length > 128) {
+ inspected = inspected.slice(0, 128) + '...'
+ }
+ const type = name.includes('.') ? 'property' : 'argument'
+ return `The ${type} '${name}' ${reason}. Received ${inspected}`
+ },
+ TypeError
+)
+
+E(
+ 'ERR_INVALID_RETURN_VALUE',
+ (input, name, value) => {
+ const type = value?.constructor?.name ? `instance of ${value.constructor.name}` : `type ${typeof value}`
+ return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`
+ },
+ TypeError
+)
+
+E(
+ 'ERR_MISSING_ARGS',
+ (...args) => {
+ assert(args.length > 0, 'At least one arg needs to be specified')
+
+    let msg = ''
+    const len = args.length
+    args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`)
+
+ switch (len) {
+ case 1:
+ msg += `The ${args[0]} argument`
+ break
+ case 2:
+ msg += `The ${args[0]} and ${args[1]} arguments`
+ break
+ default:
+ {
+ const last = args.pop()
+ msg += `The ${args.join(', ')}, and ${last} arguments`
+ }
+ break
+ }
+
+ return `${msg} must be specified`
+ },
+ TypeError
+)
+
+E(
+ 'ERR_OUT_OF_RANGE',
+ (str, range, input) => {
+ assert(range, 'Missing "range" argument')
+
+ let received
+
+ if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
+ received = addNumericalSeparator(String(input))
+ } else if (typeof input === 'bigint') {
+ received = String(input)
+
+ if (input > 2n ** 32n || input < -(2n ** 32n)) {
+ received = addNumericalSeparator(received)
+ }
+
+ received += 'n'
+ } else {
+ received = inspect(input)
+ }
+
+ return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
+ },
+ RangeError
+)
+
+E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)
+E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)
+E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)
+E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)
+E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
+E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
+E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
+E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
+E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
+E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
+E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
+
+module.exports = {
+ AbortError,
+ aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
+ hideStackFrames,
+ codes
+}
diff --git a/src/index.js b/src/index.js
new file mode 100644
index 0000000000..662b7565ac
--- /dev/null
+++ b/src/index.js
@@ -0,0 +1,71 @@
+'use strict'
+
+const Stream = require('stream')
+
+if (Stream && process.env.READABLE_STREAM === 'disable') {
+ const promises = Stream.promises
+
+ // Explicit export naming is needed for ESM
+ module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
+ module.exports._isUint8Array = Stream._isUint8Array
+ module.exports.isDisturbed = Stream.isDisturbed
+ module.exports.isErrored = Stream.isErrored
+ module.exports.isReadable = Stream.isReadable
+ module.exports.Readable = Stream.Readable
+ module.exports.Writable = Stream.Writable
+ module.exports.Duplex = Stream.Duplex
+ module.exports.Transform = Stream.Transform
+ module.exports.PassThrough = Stream.PassThrough
+ module.exports.addAbortSignal = Stream.addAbortSignal
+ module.exports.finished = Stream.finished
+ module.exports.destroy = Stream.destroy
+ module.exports.pipeline = Stream.pipeline
+ module.exports.compose = Stream.compose
+
+ Object.defineProperty(Stream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+ })
+
+ module.exports.Stream = Stream.Stream
+} else {
+ const CustomStream = require('../stream')
+ const promises = require('../stream/promises')
+ const originalDestroy = CustomStream.Readable.destroy
+
+ module.exports = CustomStream.Readable
+
+ // Explicit export naming is needed for ESM
+ module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+ module.exports._isUint8Array = CustomStream._isUint8Array
+ module.exports.isDisturbed = CustomStream.isDisturbed
+ module.exports.isErrored = CustomStream.isErrored
+ module.exports.isReadable = CustomStream.isReadable
+ module.exports.Readable = CustomStream.Readable
+ module.exports.Writable = CustomStream.Writable
+ module.exports.Duplex = CustomStream.Duplex
+ module.exports.Transform = CustomStream.Transform
+ module.exports.PassThrough = CustomStream.PassThrough
+ module.exports.addAbortSignal = CustomStream.addAbortSignal
+ module.exports.finished = CustomStream.finished
+ module.exports.destroy = CustomStream.destroy
+ module.exports.destroy = originalDestroy
+ module.exports.pipeline = CustomStream.pipeline
+ module.exports.compose = CustomStream.compose
+
+ Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+ })
+
+ module.exports.Stream = CustomStream.Stream
+}
+
+// Allow default importing
+module.exports.default = module.exports
diff --git a/src/primordials.js b/src/primordials.js
new file mode 100644
index 0000000000..14e2680bbc
--- /dev/null
+++ b/src/primordials.js
@@ -0,0 +1,101 @@
+'use strict'
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
+
+ https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js
+
+ Don't try to replace with the original file and keep it up to date with the upstream file.
+*/
+
+module.exports = {
+ ArrayIsArray(self) {
+ return Array.isArray(self)
+ },
+ ArrayPrototypeIncludes(self, el) {
+ return self.includes(el)
+ },
+ ArrayPrototypeIndexOf(self, el) {
+ return self.indexOf(el)
+ },
+ ArrayPrototypeJoin(self, sep) {
+ return self.join(sep)
+ },
+ ArrayPrototypeMap(self, fn) {
+ return self.map(fn)
+ },
+  ArrayPrototypePop(self) {
+    return self.pop()
+  },
+ ArrayPrototypePush(self, el) {
+ return self.push(el)
+ },
+ ArrayPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+ Error,
+ FunctionPrototypeCall(fn, thisArgs, ...args) {
+ return fn.call(thisArgs, ...args)
+ },
+ FunctionPrototypeSymbolHasInstance(self, instance) {
+ return Function.prototype[Symbol.hasInstance].call(self, instance)
+ },
+ MathFloor: Math.floor,
+ Number,
+ NumberIsInteger: Number.isInteger,
+ NumberIsNaN: Number.isNaN,
+ NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
+ NumberParseInt: Number.parseInt,
+ ObjectDefineProperties(self, props) {
+ return Object.defineProperties(self, props)
+ },
+ ObjectDefineProperty(self, name, prop) {
+ return Object.defineProperty(self, name, prop)
+ },
+ ObjectGetOwnPropertyDescriptor(self, name) {
+ return Object.getOwnPropertyDescriptor(self, name)
+ },
+ ObjectKeys(obj) {
+ return Object.keys(obj)
+ },
+ ObjectSetPrototypeOf(target, proto) {
+ return Object.setPrototypeOf(target, proto)
+ },
+ Promise,
+ PromisePrototypeCatch(self, fn) {
+ return self.catch(fn)
+ },
+ PromisePrototypeThen(self, thenFn, catchFn) {
+ return self.then(thenFn, catchFn)
+ },
+ PromiseReject(err) {
+ return Promise.reject(err)
+ },
+ ReflectApply: Reflect.apply,
+ RegExpPrototypeTest(self, value) {
+ return self.test(value)
+ },
+ SafeSet: Set,
+ String,
+ StringPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+ StringPrototypeToLowerCase(self) {
+ return self.toLowerCase()
+ },
+ StringPrototypeToUpperCase(self) {
+ return self.toUpperCase()
+ },
+ StringPrototypeTrim(self) {
+ return self.trim()
+ },
+ Symbol,
+ SymbolAsyncIterator: Symbol.asyncIterator,
+ SymbolHasInstance: Symbol.hasInstance,
+ SymbolIterator: Symbol.iterator,
+ TypedArrayPrototypeSet(self, buf, len) {
+ return self.set(buf, len)
+ },
+ Uint8Array
+}
diff --git a/src/test/browser/fixtures/esbuild-browsers-shims.mjs b/src/test/browser/fixtures/esbuild-browsers-shims.mjs
new file mode 100644
index 0000000000..9186f40744
--- /dev/null
+++ b/src/test/browser/fixtures/esbuild-browsers-shims.mjs
@@ -0,0 +1,9 @@
+import * as bufferModule from 'buffer-es6'
+import * as processModule from 'process-es6'
+
+export const process = processModule
+export const Buffer = bufferModule.Buffer
+
+export function setImmediate(fn, ...args) {
+ setTimeout(() => fn(...args), 1)
+}
diff --git a/src/test/browser/fixtures/esbuild.browser.config.mjs b/src/test/browser/fixtures/esbuild.browser.config.mjs
new file mode 100644
index 0000000000..6dd371dd9a
--- /dev/null
+++ b/src/test/browser/fixtures/esbuild.browser.config.mjs
@@ -0,0 +1,23 @@
+import { build } from 'esbuild'
+import alias from 'esbuild-plugin-alias'
+import { createRequire } from 'module'
+
+const require = createRequire(import.meta.url)
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.browser.js',
+ bundle: true,
+ platform: 'browser',
+ plugins: [
+ alias({
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ })
+ ],
+ define: {
+ global: 'globalThis'
+ },
+ inject: ['test/browser/fixtures/esbuild-browsers-shims.mjs']
+}).catch(() => process.exit(1))
diff --git a/src/test/browser/fixtures/esbuild.node.config.mjs b/src/test/browser/fixtures/esbuild.node.config.mjs
new file mode 100644
index 0000000000..21f70ad284
--- /dev/null
+++ b/src/test/browser/fixtures/esbuild.node.config.mjs
@@ -0,0 +1,8 @@
+import { build } from 'esbuild'
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.node.js',
+ bundle: true,
+ platform: 'node'
+}).catch(() => process.exit(1))
diff --git a/src/test/browser/fixtures/index.html b/src/test/browser/fixtures/index.html
new file mode 100644
index 0000000000..16b329e8e6
--- /dev/null
+++ b/src/test/browser/fixtures/index.html
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/test/browser/fixtures/prepare.sh b/src/test/browser/fixtures/prepare.sh
new file mode 100644
index 0000000000..56380d61f4
--- /dev/null
+++ b/src/test/browser/fixtures/prepare.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -x -e
+
+[ "$BUNDLER" == "" ] && BUNDLER=$1
+
+if [ "$BUNDLER" != "" ]; then
+ rm -rf tmp/$BUNDLER
+ mkdir -p tmp/$BUNDLER
+ cp test/browser/fixtures/index.html tmp/$BUNDLER
+fi
+
+case $BUNDLER in
+ browserify)
+ browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js
+ browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js
+ ;;
+ esbuild)
+ node src/test/browser/fixtures/esbuild.browser.config.mjs
+ node src/test/browser/fixtures/esbuild.node.config.mjs
+ ;;
+ rollup)
+ rollup -c test/browser/fixtures/rollup.browser.config.mjs
+ rollup -c test/browser/fixtures/rollup.node.config.mjs
+ ;;
+ webpack)
+ webpack -c test/browser/fixtures/webpack.browser.config.mjs
+ webpack -c test/browser/fixtures/webpack.node.config.mjs
+ ;;
+ *)
+ echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup or webpack."
+ exit 1
+ ;;
+esac
\ No newline at end of file
diff --git a/src/test/browser/fixtures/rollup.browser.config.mjs b/src/test/browser/fixtures/rollup.browser.config.mjs
new file mode 100644
index 0000000000..43d0e9f4ab
--- /dev/null
+++ b/src/test/browser/fixtures/rollup.browser.config.mjs
@@ -0,0 +1,27 @@
+import commonjs from '@rollup/plugin-commonjs'
+import inject from '@rollup/plugin-inject'
+import nodeResolve from '@rollup/plugin-node-resolve'
+import { resolve } from 'path'
+import nodePolyfill from 'rollup-plugin-polyfill-node'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ file: 'tmp/rollup/suite.browser.js',
+ format: 'iife',
+ name: 'readableStreamTestSuite'
+ },
+ plugins: [
+ commonjs(),
+ nodePolyfill(),
+ inject({
+ process: resolve('node_modules/process-es6/browser.js'),
+ Buffer: [resolve('node_modules/buffer-es6/index.js'), 'Buffer']
+ }),
+ nodeResolve({
+ browser: true,
+ preferBuiltins: false
+ })
+ ]
+}
diff --git a/src/test/browser/fixtures/rollup.node.config.mjs b/src/test/browser/fixtures/rollup.node.config.mjs
new file mode 100644
index 0000000000..7eac856bce
--- /dev/null
+++ b/src/test/browser/fixtures/rollup.node.config.mjs
@@ -0,0 +1,19 @@
+import commonjs from '@rollup/plugin-commonjs'
+import nodeResolve from '@rollup/plugin-node-resolve'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ file: 'tmp/rollup/suite.node.js',
+ format: 'cjs',
+ name: 'readableStreamTestSuite',
+ exports: 'auto'
+ },
+ plugins: [
+ commonjs(),
+ nodeResolve({
+ browser: false,
+ preferBuiltins: true
+ })
+ ]
+}
diff --git a/src/test/browser/fixtures/webpack.browser.config.mjs b/src/test/browser/fixtures/webpack.browser.config.mjs
new file mode 100644
index 0000000000..cd40faa72c
--- /dev/null
+++ b/src/test/browser/fixtures/webpack.browser.config.mjs
@@ -0,0 +1,36 @@
+import { createRequire } from 'module'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import webpack from 'webpack'
+
+const require = createRequire(import.meta.url)
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.browser.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'web',
+ performance: false,
+ plugins: [
+ new webpack.BannerPlugin({
+ banner: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ raw: true
+ }),
+ new webpack.ProvidePlugin({
+ process: require.resolve('process-es6'),
+ Buffer: [require.resolve('buffer-es6'), 'Buffer']
+ })
+ ],
+ resolve: {
+ aliasFields: ['browser'],
+ fallback: {
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ }
+ }
+}
diff --git a/src/test/browser/fixtures/webpack.node.config.mjs b/src/test/browser/fixtures/webpack.node.config.mjs
new file mode 100644
index 0000000000..3b20bdef47
--- /dev/null
+++ b/src/test/browser/fixtures/webpack.node.config.mjs
@@ -0,0 +1,15 @@
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.node.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'node',
+ performance: false
+}
diff --git a/src/test/browser/runner-browser.mjs b/src/test/browser/runner-browser.mjs
new file mode 100644
index 0000000000..e8bb84482c
--- /dev/null
+++ b/src/test/browser/runner-browser.mjs
@@ -0,0 +1,109 @@
+import { resolve } from 'node:path'
+import { Readable } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import { chromium, firefox, webkit } from 'playwright'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBrowsers = ['chrome', 'firefox', 'safari', 'edge']
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const headless = process.env.HEADLESS !== 'false'
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+
+ let [browser, bundler] = process.argv.slice(2, 4)
+
+ if (!browser) {
+ browser = process.env.BROWSER
+ }
+
+ if (!bundler) {
+ bundler = process.env.BUNDLER
+ }
+
+ if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`)
+ console.error('You can also use the BROWSER and BUNDLER environment variables.')
+ process.exit(1)
+ }
+
+ return { browser, bundler, headless, reporter }
+}
+
+function createBrowser({ browser: id, headless }) {
+ switch (id) {
+ case 'firefox':
+ return firefox.launch({ headless })
+ case 'safari':
+ return webkit.launch({ headless })
+ case 'edge':
+ return chromium.launch({ headless, channel: 'msedge' })
+ default:
+ return chromium.launch({ headless })
+ }
+}
+
+function setupTape(page, configuration) {
+ const output = new Readable({ read() {} })
+ const parser = new Parser({ strict: true })
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ parser.on('line', (line) => {
+ if (line !== '# readable-stream-finished\n') {
+ if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+
+ return
+ }
+
+ output.push(null)
+
+ if (configuration.headless) {
+ browser.close()
+ }
+ })
+
+ page.on('console', (msg) => {
+ if (msg.type() === 'error') {
+ console.error(`\x1b[31m\x1b[1mconsole.error:\x1b[0m ${msg.text()}\n`)
+ return
+ }
+
+ output.push(msg.text() + '\n')
+ })
+
+ // Firefox in headless mode is showing an error even if onerror caught it. Disable in that case
+ if (!configuration.headless || configuration.browser !== 'firefox') {
+ page.on('pageerror', (err) => {
+ console.log('\x1b[31m\x1b[1m--- The browser thrown an uncaught error ---\x1b[0m')
+ console.log(err.stack)
+
+ if (configuration.headless) {
+ console.log('\x1b[31m\x1b[1m--- Exiting with exit code 1 ---\x1b[0m')
+ process.exit(1)
+ } else {
+ process.exitCode = 1
+ }
+ })
+ }
+}
+
+const configuration = parseEnviroment()
+const browser = await createBrowser(configuration)
+const page = await browser.newPage()
+setupTape(page, configuration)
+
+// Execute the test suite
+const __dirname = fileURLToPath(new URL('.', import.meta.url))
+await page.goto(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/index.html`)}`)
diff --git a/src/test/browser/runner-node.mjs b/src/test/browser/runner-node.mjs
new file mode 100644
index 0000000000..840d19e2dc
--- /dev/null
+++ b/src/test/browser/runner-node.mjs
@@ -0,0 +1,77 @@
+import { resolve } from 'node:path'
+import { Duplex } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
// Bundlers supported by the Node-based runner.
const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']

// Read the runner configuration from argv/env.
// Exits the process with a usage message when no valid bundler is requested.
function parseEnviroment() {
  const bundler = process.argv[2] || process.env.BUNDLER
  const reporter = process.env.SKIP_REPORTER !== 'true'

  if (validBundlers.includes(bundler)) {
    return { bundler, reporter }
  }

  console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`)
  console.error('You can also use the BUNDLER environment variable.')
  process.exit(1)
}
+
// Wire TAP handling for the in-process (Node) suite run: the bundled suite
// writes its TAP lines through globalThis.logger, which we parse and
// optionally pretty-print.
function setupTape(configuration) {
  const output = new Duplex({ read() {}, write() {} })
  const parser = new Parser({ strict: true })

  // test-browser.js prefers globalThis.logger over console.log when present.
  globalThis.logger = function (message, ...args) {
    if (typeof message !== 'string') {
      // Non-string payloads (objects, errors) are not TAP: print directly.
      console.log(message, ...args)
      return
    }

    output.push(message + '\n')
  }

  output.pipe(parser)

  if (configuration.reporter) {
    output.pipe(reporter('spec'))
  }

  // Route uncaught exceptions through the browser-style global.onerror hook
  // when a suite installed one; otherwise restore default crash behavior.
  process.on('uncaughtException', (err) => {
    if (global.onerror) {
      global.onerror(err)
    } else {
      process.removeAllListeners('uncaughtException')
      throw err
    }
  })

  parser.on('line', (line) => {
    // Sentinel emitted by test-browser.js once every suite has finished.
    if (line === '# readable-stream-finished\n') {
      output.push(null)
      output.end()
      return
    } else if (line.startsWith('# not ok')) {
      // Overall failure marker: fail the process but keep draining output.
      process.exitCode = 1
    }

    if (!configuration.reporter) {
      console.log(line.replace(/\n$/, ''))
    }
  })
}
+
// Entry point: configure TAP handling, then dynamically import the bundled
// Node flavour of the suite so it runs inside this process.
async function main() {
  const config = parseEnviroment()
  setupTape(config)

  // Execute the test suite
  const currentDir = fileURLToPath(new URL('.', import.meta.url))
  const suitePath = resolve(currentDir, `../../tmp/${config.bundler}/suite.node.js`)
  await import(`file://${suitePath}`)
}

main().catch((e) => {
  console.error(e)
  process.exit(1)
})
diff --git a/src/test/browser/runner-prepare.mjs b/src/test/browser/runner-prepare.mjs
new file mode 100644
index 0000000000..76e38f8504
--- /dev/null
+++ b/src/test/browser/runner-prepare.mjs
@@ -0,0 +1,107 @@
+import { exec } from 'child_process'
+import { promises } from 'fs'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import util from '../../lib/ours/util.js'
+const { copyFile, mkdir, rmdir } = promises
+
// Wrap a path (made relative to the current working directory) in yellow
// ANSI escape codes for terminal output.
function highlightFile(file) {
  const relative = file.replace(`${process.cwd()}/`, '')
  return `\x1b[33m${relative}\x1b[0m`
}
+
// Print a message prefixed with a blue [INFO] tag.
function info(message) {
  const prefix = '\x1b[34m[INFO]\x1b[0m'
  console.log(`${prefix} ${message}`)
}
+
// Print a message prefixed with a red [ERROR] tag.
// Fix: the label previously read "[INFO]", mislabelling error output.
function error(message) {
  console.log(`\x1b[31m[ERROR]\x1b[0m ${message}`)
}
+
// Run a shell command, echoing its indented output live and terminating the
// whole script when the command exits non-zero.
async function run(command) {
  info(`Executing \x1b[33m${command}\x1b[0m ...`)
  const { promise, reject, resolve } = util.createDeferredPromise()

  let hasOutput = false
  // Indent and print a chunk of child output, emitting a leading blank line
  // before the first chunk so command output is visually separated.
  function logOutput(chunk) {
    if (!hasOutput) {
      hasOutput = true
      console.log('')
    }

    console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
  }

  try {
    // Fix: the child was previously named `process`, shadowing the global
    // `process` object. (The `stdio: 'pipe'` option was dropped: exec()
    // always pipes and does not accept a stdio option.)
    const child = exec(command, (execError) => {
      if (execError) {
        return reject(execError)
      }

      resolve(execError)
    })

    child.stdout.on('data', logOutput)
    child.stderr.on('data', logOutput)
    await promise

    if (hasOutput) {
      console.log('')
    }
  } catch (e) {
    if (hasOutput) {
      console.log('')
    }

    error(`Command failed with status code ${e.code}.`)
    process.exit(1)
  }
}
+
// Prepare the tmp/<bundler> directory: wipe it, copy the HTML harness and
// build both the browser and the Node bundle with the requested bundler.
async function main() {
  const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
  const bundler = process.argv[2] || process.env.BUNDLER

  if (!validBundlers.includes(bundler)) {
    // Fix: the usage line previously read "node await runner-prepare.mjs".
    error(`Usage: node runner-prepare.mjs [${validBundlers.join('|')}]`)
    error('You can also use the BUNDLER environment variable.')
    process.exit(1)
  }

  const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), `../../tmp/${bundler}`)
  const sourceIndex = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../test/browser/fixtures/index.html')
  const targetIndex = resolve(rootDir, 'index.html')

  info(`Emptying directory ${highlightFile(rootDir)} ...`)
  try {
    await rmdir(rootDir, { recursive: true })
  } catch (e) {
    // The directory might not exist yet: ignore the failure.
  }
  await mkdir(rootDir, { recursive: true })

  info(`Copying file ${highlightFile(sourceIndex)} to ${highlightFile(targetIndex)} ...`)
  await copyFile(sourceIndex, targetIndex)

  // Every bundler produces two bundles: one for browsers (run through
  // Playwright) and one for Node (run through runner-node.mjs).
  switch (bundler) {
    case 'browserify':
      await run('browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js')
      await run('browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js')
      break
    case 'esbuild':
      await run('node src/test/browser/fixtures/esbuild.browser.config.mjs')
      await run('node src/test/browser/fixtures/esbuild.node.config.mjs')
      break
    case 'rollup':
      await run('rollup -c test/browser/fixtures/rollup.browser.config.mjs')
      await run('rollup -c test/browser/fixtures/rollup.node.config.mjs')
      break
    case 'webpack':
      await run('webpack -c test/browser/fixtures/webpack.browser.config.mjs')
      await run('webpack -c test/browser/fixtures/webpack.node.config.mjs')
  }
}

main().catch((e) => {
  error(e)
  process.exit(1)
})
diff --git a/src/test/browser/symbols.js b/src/test/browser/symbols.js
new file mode 100644
index 0000000000..8450b8f64c
--- /dev/null
+++ b/src/test/browser/symbols.js
@@ -0,0 +1,6 @@
'use strict'

// Well-known symbols used by the browser test harness to attach metadata to
// exported test suites without clashing with regular properties.
const kReadableStreamSuiteName = Symbol('readable-stream.suiteName')
const kReadableStreamSuiteHasMultipleTests = Symbol('readable-stream.suiteHasMultipleTests')

module.exports = { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests }
diff --git a/src/test/browser/test-browser.js b/src/test/browser/test-browser.js
new file mode 100644
index 0000000000..a0dbab5e5c
--- /dev/null
+++ b/src/test/browser/test-browser.js
@@ -0,0 +1,136 @@
+'use strict'
+
// Use the logger injected by the Node runner when present (runner-node.mjs
// assigns globalThis.logger); fall back to console.log in browsers.
const logger = globalThis.logger || console.log
const tape = require('tape')
const { createDeferredPromise } = require('../../lib/ours/util')
const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')

// Global tallies across all suites: registered subtests, completed subtests
// and how many of those failed.
let totalTests = 0
let completed = 0
let failed = 0
+
// Run one suite module: expand it into named subtests, execute each in its
// own tape harness and emit a TAP "Subtest" block (inner TAP indented) plus
// an ok/not ok summary line per subtest.
async function test(rootName, fn) {
  // Gather all tests in the file
  const tests = {}

  function addTests(name, fn) {
    tests[`${rootName} - ${name}`] = fn
  }

  if (fn[kReadableStreamSuiteHasMultipleTests]) {
    // Multi-test suites receive a registration callback instead of `t`.
    fn(addTests)
  } else {
    tests[rootName] = fn
  }

  // Execute each test in a separate harness and then output overall results
  for (const [name, subtest] of Object.entries(tests)) {
    const currentIndex = ++totalTests
    const harness = tape.createHarness()
    const { promise, resolve } = createDeferredPromise()

    const messages = [`# Subtest: ${name}`]

    harness.createStream().on('data', function (row) {
      // Drop the per-harness TAP preamble and the subtest's own title line.
      if (row.startsWith('TAP version') || row.match(new RegExp(`^# (?:${name})`))) {
        return
      }

      messages.push(row.trim().replace(/^/gm, ' '))
    })

    harness.onFinish(() => {
      // NOTE(review): _exitCode is a private tape harness field — this relies
      // on tape internals to detect whether any assertion failed.
      const success = harness._exitCode === 0

      messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`)
      logger(messages.join('\n'))
      completed++

      if (!success) {
        failed++
      }

      resolve()
    })

    harness(name, subtest)

    await promise
  }
}
+
// Execute all suites sequentially and, once every registered subtest has
// reported back, print the TAP plan/summary trailer.
async function runTests(suites) {
  // Setup an interval
  const interval = setInterval(() => {
    // Polls the module-level counters updated by test() above.
    if (completed < totalTests) {
      return
    }

    clearInterval(interval)

    // TAP plan and summary lines.
    logger(`1..${totalTests}`)
    logger(`# tests ${totalTests}`)
    logger(`# pass ${completed - failed}`)
    logger(`# fail ${failed}`)
    logger(`# ${failed === 0 ? 'ok' : 'not ok'}`)

    // This line is used by the playwright script to detect we're done
    logger('# readable-stream-finished')
  }, 100)

  // Execute each test serially, to avoid side-effects errors when dealing with global error handling
  for (const suite of suites) {
    await test(suite[kReadableStreamSuiteName], suite)
  }
}
+
+// Important: Do not try to make the require dynamic because bundlers will not like it
+runTests([
+ require('./test-stream-big-packet'),
+ require('./test-stream-big-push'),
+ require('./test-stream-duplex'),
+ require('./test-stream-end-paused'),
+ require('./test-stream-finished'),
+ require('./test-stream-ispaused'),
+ require('./test-stream-pipe-after-end'),
+ require('./test-stream-pipe-cleanup-pause'),
+ require('./test-stream-pipe-cleanup'),
+ require('./test-stream-pipe-error-handling'),
+ require('./test-stream-pipe-event'),
+ require('./test-stream-pipe-without-listenerCount'),
+ require('./test-stream-pipeline'),
+ require('./test-stream-push-order'),
+ require('./test-stream-push-strings'),
+ require('./test-stream-readable-constructor-set-methods'),
+ require('./test-stream-readable-event'),
+ require('./test-stream-sync-write'),
+ require('./test-stream-transform-constructor-set-methods'),
+ require('./test-stream-transform-objectmode-falsey-value'),
+ require('./test-stream-transform-split-objectmode'),
+ require('./test-stream-unshift-empty-chunk'),
+ require('./test-stream-unshift-read-race'),
+ require('./test-stream-writable-change-default-encoding'),
+ require('./test-stream-writable-constructor-set-methods'),
+ require('./test-stream-writable-decoded-encoding'),
+ require('./test-stream-writev'),
+ require('./test-stream2-base64-single-char-read-end'),
+ require('./test-stream2-compatibility'),
+ require('./test-stream2-large-read-stall'),
+ require('./test-stream2-objects'),
+ require('./test-stream2-pipe-error-handling'),
+ require('./test-stream2-pipe-error-once-listener'),
+ require('./test-stream2-push'),
+ require('./test-stream2-readable-empty-buffer-no-eof'),
+ require('./test-stream2-readable-from-list'),
+ require('./test-stream2-readable-legacy-drain'),
+ require('./test-stream2-readable-non-empty-end'),
+ require('./test-stream2-readable-wrap-empty'),
+ require('./test-stream2-readable-wrap'),
+ require('./test-stream2-set-encoding'),
+ require('./test-stream2-transform'),
+ require('./test-stream2-unpipe-drain'),
+ require('./test-stream2-writable'),
+ require('./test-stream3-pause-then-read')
+]).catch((e) => {
+ console.error(e)
+})
diff --git a/src/test/browser/test-stream-big-packet.js b/src/test/browser/test-stream-big-packet.js
new file mode 100644
index 0000000000..8859e4b441
--- /dev/null
+++ b/src/test/browser/test-stream-big-packet.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(3)
+
+ let passed = false
+
+ function PassThrough() {
+ Transform.call(this)
+ }
+ inherits(PassThrough, Transform)
+
+ PassThrough.prototype._transform = function (chunk, encoding, done) {
+ this.push(chunk)
+ done()
+ }
+
+ function TestStream() {
+ Transform.call(this)
+ }
+ inherits(TestStream, Transform)
+
+ TestStream.prototype._transform = function (chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = indexOf(chunk.toString(), 'a') >= 0
+ }
+ if (passed) {
+ t.ok(passed)
+ }
+ done()
+ }
+
+ const s1 = new PassThrough()
+ const s2 = new PassThrough()
+ const s3 = new TestStream()
+
+ s1.pipe(s3)
+ // Don't let s2 auto close which may close s3
+ s2.pipe(s3, { end: false })
+
+ // We must write a buffer larger than highWaterMark
+ const big = Buffer.alloc(s1._writableState.highWaterMark + 1)
+ big.fill('x')
+
+ // Since big is larger than highWaterMark, it will be buffered internally.
+ t.notOk(s1.write(big))
+
+ // 'tiny' is small enough to pass through internal buffer.
+ t.ok(s2.write('tiny'))
+
+ // Write some small data in next IO loop, which will never be written to s3
+ // Because 'drain' event is not emitted from s1 and s1 is still paused
+ setImmediate(s1.write.bind(s1), 'later')
+
+ function indexOf(xs, x) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) {
+ return i
+ }
+ }
+ return -1
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-big-packet'
diff --git a/src/test/browser/test-stream-big-push.js b/src/test/browser/test-stream-big-push.js
new file mode 100644
index 0000000000..08d8873f3a
--- /dev/null
+++ b/src/test/browser/test-stream-big-push.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
// Pushing more than highWaterMark must make push() return false, reads must
// drain in order, and EOF must be signalled exactly once.
module.exports = function (t) {
  t.plan(10)

  const str = 'asdfasdfasdfasdfasdf'

  const r = new Readable({
    highWaterMark: 5,
    encoding: 'utf8'
  })

  let reads = 0
  let eofed = false
  let ended = false

  r._read = function (n) {
    if (reads === 0) {
      // First read: deliver the chunk asynchronously.
      setTimeout(function () {
        r.push(str)
      })
      reads++
    } else if (reads === 1) {
      // Second read: synchronous push exceeds the 5-byte highWaterMark.
      const ret = r.push(str)
      t.equal(ret, false)
      reads++
    } else {
      // Any further read signals EOF — and must happen only once.
      t.notOk(eofed)
      eofed = true
      r.push(null)
    }
  }

  r.on('end', function () {
    ended = true
  })

  // push some data in to start.
  // we've never gotten any read event at this point.
  const ret = r.push(str)

  // should be false. > hwm
  t.notOk(ret)
  let chunk = r.read()
  t.equal(chunk, str)

  chunk = r.read()
  t.equal(chunk, null)

  r.once('readable', function () {
    // this time, we'll get *all* the remaining data, because
    // it's been added synchronously, as the read WOULD take
    // us below the hwm, and so it triggered a _read() again,
    // which synchronously added more, which we then return.
    chunk = r.read()
    t.equal(chunk, str + str)

    chunk = r.read()
    t.equal(chunk, null)
  })

  // Second 'end' listener: runs after the flag above has been set.
  r.on('end', function () {
    t.ok(eofed)
    t.ok(ended)
    t.equal(reads, 2)
  })
}

module.exports[kReadableStreamSuiteName] = 'stream-big-push'
diff --git a/src/test/browser/test-stream-duplex.js b/src/test/browser/test-stream-duplex.js
new file mode 100644
index 0000000000..1278591382
--- /dev/null
+++ b/src/test/browser/test-stream-duplex.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const { Duplex } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(4)
+
+ const stream = new Duplex({ objectMode: true })
+
+ t.ok(stream._readableState.objectMode)
+ t.ok(stream._writableState.objectMode)
+
+ let written
+ let read
+
+ stream._write = function (obj, _, cb) {
+ written = obj
+ cb()
+ }
+
+ stream._read = function () {}
+
+ stream.on('data', function (obj) {
+ read = obj
+ })
+
+ stream.on('end', function () {
+ t.equal(read.val, 1)
+ t.equal(written.val, 2)
+ })
+
+ stream.push({ val: 1 })
+ stream.end({ val: 2 })
+ stream.push(null)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-duplex'
diff --git a/src/test/browser/test-stream-end-paused.js b/src/test/browser/test-stream-end-paused.js
new file mode 100644
index 0000000000..76a98da510
--- /dev/null
+++ b/src/test/browser/test-stream-end-paused.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(2)
+
+ const stream = new Readable()
+ let calledRead = false
+
+ stream._read = function () {
+ t.notOk(calledRead)
+ calledRead = true
+ this.push(null)
+ }
+
+ stream.on('data', function () {
+ throw new Error('should not ever get data')
+ })
+
+ stream.pause()
+
+ setTimeout(function () {
+ stream.on('end', function () {
+ t.ok(calledRead)
+ })
+ stream.resume()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-end-paused'
diff --git a/src/test/browser/test-stream-finished.js b/src/test/browser/test-stream-finished.js
new file mode 100644
index 0000000000..f9ddc907f9
--- /dev/null
+++ b/src/test/browser/test-stream-finished.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const { Writable, Readable, Transform, finished } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('readable finished', function (t) {
+ t.plan(1)
+
+ const rs = new Readable({
+ read: function read() {}
+ })
+
+ finished(rs, (err) => {
+ t.ifErr(err)
+ })
+
+ rs.push(null)
+ rs.resume()
+ })
+
+ test('writable finished', function (t) {
+ t.plan(1)
+
+ const ws = new Writable({
+ write: function write(data, enc, cb) {
+ cb()
+ }
+ })
+
+ finished(ws, (err) => {
+ t.ifErr(err)
+ })
+
+ ws.end()
+ })
+
+ test('transform finished', function (t) {
+ t.plan(3)
+
+ const tr = new Transform({
+ transform: function transform(data, enc, cb) {
+ cb()
+ }
+ })
+
+ let finish = false
+ let ended = false
+
+ tr.on('end', function () {
+ ended = true
+ })
+
+ tr.on('finish', function () {
+ finish = true
+ })
+
+ finished(tr, (err) => {
+ t.ifErr(err)
+ t.ok(finish)
+ t.ok(ended)
+ })
+
+ tr.end()
+ tr.resume()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-finished'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-ispaused.js b/src/test/browser/test-stream-ispaused.js
new file mode 100644
index 0000000000..36e55d7c9e
--- /dev/null
+++ b/src/test/browser/test-stream-ispaused.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(4)
+
+ const readable = new stream.Readable()
+
+ // _read is a noop, here.
+ readable._read = () => {}
+
+ // default state of a stream is not "paused"
+ t.notOk(readable.isPaused())
+
+ // make the stream start flowing...
+ readable.on('data', () => {})
+
+ // still not paused.
+ t.notOk(readable.isPaused())
+
+ readable.pause()
+ t.ok(readable.isPaused())
+ readable.resume()
+ t.notOk(readable.isPaused())
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-ispaused'
diff --git a/src/test/browser/test-stream-pipe-after-end.js b/src/test/browser/test-stream-pipe-after-end.js
new file mode 100644
index 0000000000..13aac69313
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-after-end.js
@@ -0,0 +1,69 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
// Piping a Readable that has already ended must still deliver 'end' to the
// pipeline and finish the destination; an un-read stream must not emit
// 'end' until it is actually read.
module.exports = function (t) {
  t.plan(4)

  function TestReadable(opt) {
    if (!(this instanceof TestReadable)) {
      return new TestReadable(opt)
    }
    Readable.call(this, opt)
    this._ended = false
  }
  inherits(TestReadable, Readable)

  TestReadable.prototype._read = function (n) {
    // EOF is one-shot: a second _read call is a bug.
    if (this._ended) {
      this.emit('error', new Error('_read called twice'))
    }
    this._ended = true
    this.push(null)
  }

  function TestWritable(opt) {
    if (!(this instanceof TestWritable)) {
      return new TestWritable(opt)
    }
    Writable.call(this, opt)
    this._written = []
  }
  inherits(TestWritable, Writable)

  TestWritable.prototype._write = function (chunk, encoding, cb) {
    this._written.push(chunk)
    cb()
  }

  // this one should not emit 'end' until we read() from it later.
  const ender = new TestReadable()
  let enderEnded = false

  // what happens when you pipe() a Readable that's already ended?
  const piper = new TestReadable()
  // pushes EOF null, and length=0, so this will trigger 'end'
  piper.read()

  setTimeout(function () {
    ender.on('end', function () {
      enderEnded = true
      t.ok(true, 'enderEnded')
    })
    // Not read yet, so 'end' must not have fired.
    t.notOk(enderEnded)

    const c = ender.read()
    t.equal(c, null)

    const w = new TestWritable()
    w.on('finish', function () {
      t.ok(true, 'writableFinished')
    })

    piper.pipe(w)
  })
}

module.exports[kReadableStreamSuiteName] = 'stream-pipe-after-end'
diff --git a/src/test/browser/test-stream-pipe-cleanup-pause.js b/src/test/browser/test-stream-pipe-cleanup-pause.js
new file mode 100644
index 0000000000..53078d3b7b
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-cleanup-pause.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
// Unpiping inside a write and immediately re-piping to another destination
// must route all subsequent (buffered) chunks to the new destination.
module.exports = function (t) {
  t.plan(3)

  const reader = new stream.Readable()
  const writer1 = new stream.Writable()
  const writer2 = new stream.Writable()

  // 560000 is chosen here because it is larger than the (default) highWaterMark
  // and will cause `.write()` to return false
  // See: https://github.com/nodejs/node/issues/2323
  const buffer = Buffer.alloc(560000)

  reader._read = function () {}

  writer1._write = function (chunk, encoding, cb) {
    this.emit('chunk-received')
    cb()
  }

  writer1.on('chunk-received', function () {
    // Switch destinations mid-stream, then push three more chunks across
    // successive macrotasks; all three must land on writer2 (the 3 asserts).
    reader.unpipe(writer1)
    reader.pipe(writer2)
    reader.push(buffer)

    setImmediate(function () {
      reader.push(buffer)

      setImmediate(function () {
        reader.push(buffer)
      })
    })
  })

  writer2._write = function (chunk, encoding, cb) {
    t.ok(true)
    cb()
  }

  reader.pipe(writer1)
  reader.push(buffer)
}

module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup-pause'
diff --git a/src/test/browser/test-stream-pipe-cleanup.js b/src/test/browser/test-stream-pipe-cleanup.js
new file mode 100644
index 0000000000..9dcf0dad90
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-cleanup.js
@@ -0,0 +1,117 @@
+'use strict'
+// This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
+
+const inherits = require('inherits')
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
// pipe() must remove every listener it installed on source and destination
// once a pipeline tears down — repeated pipes must not leak listeners.
module.exports = function (t) {
  t.plan(27)

  // Legacy Node 0.8 had different pipe cleanup semantics; skip there.
  if (/^v0\.8\./.test(process.version)) {
    return
  }

  // Minimal legacy-style Writable: counts end()/destroy() calls.
  function Writable() {
    this.writable = true
    this.endCalls = 0
    Stream.call(this)
  }
  inherits(Writable, Stream)

  Writable.prototype.end = function () {
    this.endCalls++
  }

  Writable.prototype.destroy = function () {
    this.endCalls++
  }

  // Minimal legacy-style Readable.
  function Readable() {
    this.readable = true
    Stream.call(this)
  }

  inherits(Readable, Stream)

  Readable.prototype._read = function () {}

  // Readable + Writable combination built on the Writable above.
  function Duplex() {
    this.readable = true
    Writable.call(this)
  }

  inherits(Duplex, Writable)

  Duplex.prototype._read = function () {}

  let i = 0
  let r
  let w = new Writable()
  const limit = 100

  // Repeated pipe + 'end' must not accumulate listeners on the source.
  for (i = 0; i < limit; i++) {
    r = new Readable()
    r.pipe(w)
    r.emit('end')
  }
  t.equal(0, r.listeners('end').length)
  t.equal(limit, w.endCalls)

  w.endCalls = 0

  // Same, with the source emitting 'close' instead of 'end'.
  for (i = 0; i < limit; i++) {
    r = new Readable()
    r.pipe(w)
    r.emit('close')
  }
  t.equal(0, r.listeners('close').length)
  t.equal(limit, w.endCalls)

  w.endCalls = 0

  r = new Readable()

  // And with the destination emitting 'close'.
  for (i = 0; i < limit; i++) {
    w = new Writable()
    r.pipe(w)
    w.emit('close')
  }
  t.equal(0, w.listeners('close').length)

  // Two chained pipelines: verify the exact listener bookkeeping.
  r = new Readable()
  w = new Writable()
  const d = new Duplex()
  r.pipe(d) // pipeline A
  d.pipe(w) // pipeline B
  t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup
  t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup
  t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
  t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup
  t.equal(w.listeners('end').length, 0)
  t.equal(w.listeners('close').length, 1) // B.cleanup

  // Ending pipeline A must tear down only A's listeners.
  r.emit('end')
  t.equal(d.endCalls, 1)
  t.equal(w.endCalls, 0)
  t.equal(r.listeners('end').length, 0)
  t.equal(r.listeners('close').length, 0)
  t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
  t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup
  t.equal(w.listeners('end').length, 0)
  t.equal(w.listeners('close').length, 1) // B.cleanup

  // Ending pipeline B tears down everything that is left.
  d.emit('end')
  t.equal(d.endCalls, 1)
  t.equal(w.endCalls, 1)
  t.equal(r.listeners('end').length, 0)
  t.equal(r.listeners('close').length, 0)
  t.equal(d.listeners('end').length, 0)
  t.equal(d.listeners('close').length, 0)
  t.equal(w.listeners('end').length, 0)
  t.equal(w.listeners('close').length, 0)
  d.end()
}

module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup'
diff --git a/src/test/browser/test-stream-pipe-error-handling.js b/src/test/browser/test-stream-pipe-error-handling.js
new file mode 100644
index 0000000000..7cbfbcabb4
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-error-handling.js
@@ -0,0 +1,111 @@
+'use strict'
+
+const { Readable, Writable, Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
// Four scenarios for 'error' during pipe(): handled by a listener, unhandled
// (throws), the only listener removed before the error fires, and an
// unrelated listener removed (the original handler stays attached).
module.exports = function (test) {
  test('Error Listener Catches', function (t) {
    t.plan(1)

    const source = new Stream()
    const dest = new Stream()

    source._read = function () {}
    source.pipe(dest)

    let gotErr = null
    source.on('error', function (err) {
      gotErr = err
    })

    const err = new Error('This stream turned into bacon.')
    source.emit('error', err)
    t.strictEqual(gotErr, err)
  })

  test('Error WithoutListener Throws', function (t) {
    t.plan(1)

    const source = new Stream()
    const dest = new Stream()

    source._read = function () {}
    source.pipe(dest)

    const err = new Error('This stream turned into bacon.')

    // With no 'error' listener, emit('error') throws synchronously.
    let gotErr = null
    try {
      source.emit('error', err)
    } catch (e) {
      gotErr = e
    }

    t.strictEqual(gotErr, err)
  })

  test('Error With Removed Listener Throws', function (t) {
    t.plan(2)

    // Save the browser-style global error hook so it can be restored.
    const onerror = global.onerror

    const r = new Readable()
    const w = new Writable()
    let removed = false
    let caught = false

    // With the handler removed, the error must escape to global.onerror
    // instead of reaching the (removed) myOnError listener.
    global.onerror = () => {
      t.notOk(caught)
      global.onerror = onerror
      return true
    }

    r._read = function () {
      setTimeout(function () {
        t.ok(removed)
        w.emit('error', new Error('fail'))
      })
    }

    w.on('error', myOnError)
    r.pipe(w)
    w.removeListener('error', myOnError)
    removed = true

    function myOnError(er) {
      caught = true
    }
  })

  test('Error Listener Catches When Wrong Listener Is Removed', function (t) {
    t.plan(2)

    const r = new Readable()
    const w = new Writable()
    let removed = false
    let caught = false

    r._read = function () {
      setTimeout(function () {
        t.ok(removed)
        w.emit('error', new Error('fail'))
      })
    }

    w.on('error', myOnError)
    w._write = function () {}

    r.pipe(w)
    // Removing some OTHER random listener should not do anything
    w.removeListener('error', function () {})
    removed = true

    function myOnError(er) {
      t.notOk(caught)
      caught = true
    }
  })
}

module.exports[kReadableStreamSuiteName] = 'stream-pipe-error-handling'
module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-pipe-event.js b/src/test/browser/test-stream-pipe-event.js
new file mode 100644
index 0000000000..c03180c20d
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-event.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ function Writable() {
+ this.writable = true
+ Stream.call(this)
+ }
+ inherits(Writable, Stream)
+
+ function Readable() {
+ this.readable = true
+ Stream.call(this)
+ }
+ inherits(Readable, Stream)
+
+ let passed = false
+
+ const w = new Writable()
+ w.on('pipe', function (src) {
+ passed = true
+ })
+
+ const r = new Readable()
+ r._read = function () {}
+ r.pipe(w)
+
+ t.ok(passed)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-event'
diff --git a/src/test/browser/test-stream-pipe-without-listenerCount.js b/src/test/browser/test-stream-pipe-without-listenerCount.js
new file mode 100644
index 0000000000..1e8238cd8b
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-without-listenerCount.js
@@ -0,0 +1,22 @@
+'use strict'
+
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const r = new Stream({
+ read: function () {}
+ })
+ r.listenerCount = undefined
+
+ const w = new Stream()
+ w.on('pipe', function () {
+ r.emit('error', new Error('Readable Error'))
+ })
+
+ t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function')
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-without-listenerCount'
diff --git a/src/test/browser/test-stream-pipeline.js b/src/test/browser/test-stream-pipeline.js
new file mode 100644
index 0000000000..232f336eb6
--- /dev/null
+++ b/src/test/browser/test-stream-pipeline.js
@@ -0,0 +1,114 @@
+'use strict'
+
+const { Readable, Writable, pipeline } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
// Exercises pipeline(): successful flow, argument validation, premature
// close, and explicit destroy with a custom error.
module.exports = function (test) {
  test('pipeline', function (t) {
    t.plan(3)

    let finished = false

    const processed = []
    const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]

    const read = new Readable({
      read: function read() {}
    })

    const write = new Writable({
      write: function write(data, enc, cb) {
        processed.push(data)
        cb()
      }
    })

    write.on('finish', function () {
      finished = true
    })

    for (let i = 0; i < expected.length; i++) {
      read.push(expected[i])
    }

    read.push(null)
    pipeline(read, write, (err) => {
      t.ifErr(err)
      t.ok(finished)
      t.deepEqual(processed, expected)
    })
  })

  test('pipeline missing args', function (t) {
    t.plan(3)

    const _read = new Readable({
      read: function read() {}
    })

    // pipeline() requires at least a source, a destination and a callback.
    t.throws(function () {
      pipeline(_read, function () {})
    })

    t.throws(function () {
      pipeline(function () {})
    })

    t.throws(function () {
      pipeline()
    })
  })

  test('pipeline error', function (t) {
    t.plan(1)

    const _read2 = new Readable({
      read: function read() {}
    })

    const _write = new Writable({
      write: function write(data, enc, cb) {
        cb()
      }
    })

    _read2.push('data')

    // Destroying the source mid-flight must surface as "Premature close".
    setImmediate(function () {
      return _read2.destroy()
    })

    pipeline(_read2, _write, (err) => {
      t.equal(err.message, 'Premature close')
    })
  })

  test('pipeline destroy', function (t) {
    t.plan(2)

    const _read3 = new Readable({
      read: function read() {}
    })

    const _write2 = new Writable({
      write: function write(data, enc, cb) {
        cb()
      }
    })

    _read3.push('data')

    setImmediate(function () {
      return _read3.destroy(new Error('kaboom'))
    })

    // pipeline() returns the last stream in the chain.
    const dst = pipeline(_read3, _write2, (err) => {
      t.equal(err.message, 'kaboom')
    })

    t.equal(dst, _write2)
  })
}

module.exports[kReadableStreamSuiteName] = 'stream-pipeline'
module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-push-order.js b/src/test/browser/test-stream-push-order.js
new file mode 100644
index 0000000000..4afcf756bc
--- /dev/null
+++ b/src/test/browser/test-stream-push-order.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const s = new Readable({
+ highWaterMark: 20,
+ encoding: 'ascii'
+ })
+
+ const list = ['1', '2', '3', '4', '5', '6']
+
+ s._read = function (n) {
+ const one = list.shift()
+ if (!one) {
+ s.push(null)
+ } else {
+ const two = list.shift()
+ s.push(one)
+ s.push(two)
+ }
+ }
+
+ s.read(0)
+
+ setTimeout(function () {
+ t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6')
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-push-order'
diff --git a/src/test/browser/test-stream-push-strings.js b/src/test/browser/test-stream-push-strings.js
new file mode 100644
index 0000000000..bb254c5b39
--- /dev/null
+++ b/src/test/browser/test-stream-push-strings.js
@@ -0,0 +1,57 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(2)
+
+ function MyStream(options) {
+ Readable.call(this, options)
+ this._chunks = 3
+ }
+
+ inherits(MyStream, Readable)
+
+ MyStream.prototype._read = function (n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null)
+ case 1:
+ return setTimeout(
+ function () {
+ this.push('last chunk')
+ }.bind(this),
+ 100
+ )
+ case 2:
+ return this.push('second to last chunk')
+ case 3:
+ return process.nextTick(
+ function () {
+ this.push('first chunk')
+ }.bind(this)
+ )
+ default:
+ throw new Error('?')
+ }
+ }
+ const expect = ['first chunksecond to last chunk', 'last chunk']
+
+ const ms = new MyStream()
+ const results = []
+ ms.on('readable', function () {
+ let chunk
+ while ((chunk = ms.read()) !== null) {
+ results.push(chunk + '')
+ }
+ })
+
+ ms.on('end', function () {
+ t.equal(ms._chunks, -1)
+ t.deepEqual(results, expect)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-push-strings'
diff --git a/src/test/browser/test-stream-readable-constructor-set-methods.js b/src/test/browser/test-stream-readable-constructor-set-methods.js
new file mode 100644
index 0000000000..6d4ff89359
--- /dev/null
+++ b/src/test/browser/test-stream-readable-constructor-set-methods.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(2)
+
+ let _readCalled = false
+
+ function _read(n) {
+ _readCalled = true
+ this.push(null)
+ }
+
+ const r = new Readable({ read: _read })
+ r.resume()
+
+ setTimeout(function () {
+ t.equal(r._read, _read)
+ t.ok(_readCalled)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-readable-constructor-set-methods'
diff --git a/src/test/browser/test-stream-readable-event.js b/src/test/browser/test-stream-readable-event.js
new file mode 100644
index 0000000000..0c821409bf
--- /dev/null
+++ b/src/test/browser/test-stream-readable-event.js
@@ -0,0 +1,110 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('readable events - first', (t) => {
+ t.plan(3)
+
+ // First test, not reading when the readable is added.
+ // make sure that on('readable', ...) triggers a readable event.
+ const r = new Readable({
+ highWaterMark: 3
+ })
+
+ let _readCalled = false
+ r._read = function (n) {
+ _readCalled = true
+ }
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+
+ let caughtReadable = false
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(_readCalled)
+
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+
+ test('readable events - second', (t) => {
+ t.plan(3)
+
+ // second test, make sure that readable is re-emitted if there's
+ // already a length, while it IS reading.
+
+ const r = new Readable({
+ highWaterMark: 3
+ })
+
+ let _readCalled = false
+ r._read = function (n) {
+ _readCalled = true
+ }
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('bl'))
+
+ let caughtReadable = false
+ setTimeout(function () {
+ // assert we're testing what we think we are
+ t.ok(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.ok(_readCalled)
+
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+
+ test('readable events - third', (t) => {
+ t.plan(3)
+
+ // Third test, not reading when the stream has not passed
+ // the highWaterMark but *has* reached EOF.
+ const r = new Readable({
+ highWaterMark: 30
+ })
+
+ let _readCalled = false
+ r._read = function (n) {
+ _readCalled = true
+ }
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+ r.push(null)
+
+ let caughtReadable = false
+ setTimeout(function () {
+ // assert we're testing what we think we are
+ t.notOk(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(_readCalled)
+
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-readable-event'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-sync-write.js b/src/test/browser/test-stream-sync-write.js
new file mode 100644
index 0000000000..dd3a1b2539
--- /dev/null
+++ b/src/test/browser/test-stream-sync-write.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(2)
+
+ let internalCalls = 0
+ let externalCalls = 0
+
+ const InternalStream = function () {
+ Writable.call(this)
+ }
+ inherits(InternalStream, Writable)
+
+ InternalStream.prototype._write = function (chunk, encoding, callback) {
+ internalCalls++
+ callback()
+ }
+
+ const internalStream = new InternalStream()
+
+ const ExternalStream = function (writable) {
+ this._writable = writable
+ Writable.call(this)
+ }
+ inherits(ExternalStream, Writable)
+
+ ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ externalCalls++
+ this._writable.write(chunk, encoding, callback)
+ }
+
+ const externalStream = new ExternalStream(internalStream)
+
+ for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString())
+ }
+
+ externalStream.end(() => {
+ t.equal(internalCalls, 2000)
+ t.equal(externalCalls, 2000)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-sync-write'
diff --git a/src/test/browser/test-stream-transform-constructor-set-methods.js b/src/test/browser/test-stream-transform-constructor-set-methods.js
new file mode 100644
index 0000000000..c64df97dcb
--- /dev/null
+++ b/src/test/browser/test-stream-transform-constructor-set-methods.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(4)
+
+ let _transformCalled = false
+ function _transform(d, e, n) {
+ _transformCalled = true
+ n()
+ }
+
+ let _flushCalled = false
+ function _flush(n) {
+ _flushCalled = true
+ n()
+ }
+
+ const tr = new Transform({
+ transform: _transform,
+ flush: _flush
+ })
+
+ tr.end(Buffer.from('blerg'))
+ tr.resume()
+
+ tr.on('end', function () {
+ t.equal(tr._transform, _transform)
+ t.equal(tr._flush, _flush)
+ t.ok(_transformCalled)
+ t.ok(_flushCalled)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-constructor-set-methods'
diff --git a/src/test/browser/test-stream-transform-objectmode-falsey-value.js b/src/test/browser/test-stream-transform-objectmode-falsey-value.js
new file mode 100644
index 0000000000..69173cce31
--- /dev/null
+++ b/src/test/browser/test-stream-transform-objectmode-falsey-value.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const { PassThrough } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(13)
+
+ const src = new PassThrough({ objectMode: true })
+ const tx = new PassThrough({ objectMode: true })
+ const dest = new PassThrough({ objectMode: true })
+
+ const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ const results = []
+ dest.on('end', function () {
+ t.deepEqual(results, expect)
+ })
+
+ dest.on('data', function (x) {
+ results.push(x)
+ })
+
+ src.pipe(tx).pipe(dest)
+
+ let i = -1
+ const int = setInterval(function () {
+ if (i > 10) {
+ src.end()
+ clearInterval(int)
+ } else {
+ t.ok(true)
+ src.write(i++)
+ }
+ }, 10)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-objectmode-falsey-value'
diff --git a/src/test/browser/test-stream-transform-split-objectmode.js b/src/test/browser/test-stream-transform-split-objectmode.js
new file mode 100644
index 0000000000..e50ac2c251
--- /dev/null
+++ b/src/test/browser/test-stream-transform-split-objectmode.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(10)
+
+ const parser = new Transform({ readableObjectMode: true })
+
+ t.ok(parser._readableState.objectMode, 'parser 1')
+ t.notOk(parser._writableState.objectMode, 'parser 2')
+ t.equals(parser._readableState.highWaterMark, 16, 'parser 3')
+ t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4')
+
+ parser._transform = function (chunk, enc, callback) {
+ callback(null, { val: chunk[0] })
+ }
+
+ let parsed
+
+ parser.on('data', function (obj) {
+ parsed = obj
+ })
+
+ parser.end(Buffer.from([42]))
+
+ parser.on('end', function () {
+ t.equals(parsed.val, 42, 'parser ended')
+ })
+
+ const serializer = new Transform({ writableObjectMode: true })
+
+ t.notOk(serializer._readableState.objectMode, 'serializer 1')
+ t.ok(serializer._writableState.objectMode, 'serializer 2')
+ t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3')
+ t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4')
+
+ serializer._transform = function (obj, _, callback) {
+ callback(null, Buffer.from([obj.val]))
+ }
+
+ let serialized
+
+ serializer.on('data', function (chunk) {
+ serialized = chunk
+ })
+
+ serializer.write({ val: 42 })
+
+ serializer.on('end', function () {
+    t.equals(serialized[0], 42, 'serializer ended')
+ })
+ setImmediate(function () {
+ serializer.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-split-objectmode'
diff --git a/src/test/browser/test-stream-unshift-empty-chunk.js b/src/test/browser/test-stream-unshift-empty-chunk.js
new file mode 100644
index 0000000000..2ebbd20930
--- /dev/null
+++ b/src/test/browser/test-stream-unshift-empty-chunk.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const r = new Readable()
+ let nChunks = 10
+ const chunk = Buffer.alloc(10)
+ chunk.fill('x')
+
+ r._read = function (n) {
+ setTimeout(function () {
+ r.push(--nChunks === 0 ? null : chunk)
+ })
+ }
+
+ let readAll = false
+ const seen = []
+ r.on('readable', function () {
+ let chunk
+ while ((chunk = r.read())) {
+ seen.push(chunk.toString())
+ // simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+ const putBack = Buffer.alloc(readAll ? 0 : 5)
+ putBack.fill('y')
+ readAll = !readAll
+ r.unshift(putBack)
+ }
+ })
+
+ const expect = [
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy'
+ ]
+
+ r.on('end', function () {
+ t.deepEqual(seen, expect)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-empty-chunk'
diff --git a/src/test/browser/test-stream-unshift-read-race.js b/src/test/browser/test-stream-unshift-read-race.js
new file mode 100644
index 0000000000..a600fe1cd4
--- /dev/null
+++ b/src/test/browser/test-stream-unshift-read-race.js
@@ -0,0 +1,124 @@
+'use strict'
+
+// This test verifies that:
+// 1. unshift() does not cause colliding _read() calls.
+// 2. unshift() after the 'end' event is an error, but after the EOF
+// signalling null, it is ok, and just creates a new readable chunk.
+// 3. push() after the EOF signaling null is an error.
+// 4. _read() is not called after pushing the EOF null chunk.
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(139)
+
+ const hwm = 10
+ const r = stream.Readable({ highWaterMark: hwm })
+ const chunks = 10
+
+ const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2))
+ for (let i = 0; i < data.length; i++) {
+ const c = 'asdf'.charCodeAt(i % 4)
+ data[i] = c
+ }
+
+ let pos = 0
+ let pushedNull = false
+ r._read = function (n) {
+ t.notOk(pushedNull, '_read after null push')
+
+ // every third chunk is fast
+ push(!(chunks % 3))
+
+ function push(fast) {
+ t.notOk(pushedNull, 'push() after null push')
+ const c = pos >= data.length ? null : data.slice(pos, pos + n)
+ pushedNull = c === null
+ if (fast) {
+ pos += n
+ r.push(c)
+ if (c === null) {
+ pushError()
+ }
+ } else {
+ setTimeout(function () {
+ pos += n
+ r.push(c)
+ if (c === null) {
+ pushError()
+ }
+ }, 1)
+ }
+ }
+ }
+
+ function pushError() {
+ r.unshift(Buffer.allocUnsafe(1))
+ w.end()
+
+ const onerror = global.onerror
+ global.onerror = () => {
+ t.ok(true)
+ global.onerror = onerror
+ return true
+ }
+
+ r.push(Buffer.allocUnsafe(1))
+ }
+
+ const w = stream.Writable()
+ const written = []
+ w._write = function (chunk, encoding, cb) {
+ written.push(chunk.toString())
+ cb()
+ }
+
+ r.on('end', t.fail)
+
+ r.on('readable', function () {
+ let chunk
+ while ((chunk = r.read(10)) !== null) {
+ w.write(chunk)
+ if (chunk.length > 4) {
+ r.unshift(Buffer.from('1234'))
+ }
+ }
+ })
+
+ w.on('finish', function () {
+    // each chunk should start with 1234, and then be asdfasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ t.equal(written[0], 'asdfasdfas')
+ let asdf = 'd'
+
+ // console.error('0: %s', written[0]);
+ for (let i = 1; i < written.length; i++) {
+ // console.error('%s: %s', i.toString(32), written[i]);
+ t.equal(written[i].slice(0, 4), '1234')
+ for (let j = 4; j < written[i].length; j++) {
+ const c = written[i].charAt(j)
+ t.equal(c, asdf)
+ switch (asdf) {
+ case 'a':
+ asdf = 's'
+ break
+ case 's':
+ asdf = 'd'
+ break
+ case 'd':
+ asdf = 'f'
+ break
+ case 'f':
+ asdf = 'a'
+ break
+ }
+ }
+ }
+
+ t.equal(written.length, 18)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-read-race'
diff --git a/src/test/browser/test-stream-writable-change-default-encoding.js b/src/test/browser/test-stream-writable-change-default-encoding.js
new file mode 100644
index 0000000000..3cfa208e41
--- /dev/null
+++ b/src/test/browser/test-stream-writable-change-default-encoding.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(MyWritable, stream.Writable)
+
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+}
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
+}
+
+module.exports = function (test) {
+  test('defaultEncodingIsUtf8', (t) => {
+ t.plan(1)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'utf8')
+ },
+ { decodeStrings: false }
+ )
+ m.write('foo')
+ m.end()
+ })
+
+ test('changeDefaultEncodingToAscii', (t) => {
+ t.plan(1)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ { decodeStrings: false }
+ )
+ m.setDefaultEncoding('ascii')
+ m.write('bar')
+ m.end()
+ })
+
+ test('changeDefaultEncodingToInvalidValue', (t) => {
+ t.plan(1)
+
+ t.throws(function () {
+ const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false })
+ m.setDefaultEncoding({})
+ m.write('bar')
+ m.end()
+ }, TypeError)
+ })
+
+  test('checkVariableCaseEncoding', (t) => {
+ t.plan(1)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ { decodeStrings: false }
+ )
+ m.setDefaultEncoding('AsCii')
+ m.write('bar')
+ m.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-change-default-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-writable-constructor-set-methods.js b/src/test/browser/test-stream-writable-constructor-set-methods.js
new file mode 100644
index 0000000000..43d935b815
--- /dev/null
+++ b/src/test/browser/test-stream-writable-constructor-set-methods.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(5)
+
+ let _writeCalled = false
+ function _write(d, e, n) {
+ _writeCalled = true
+ }
+
+ const w = new Writable({ write: _write })
+ w.end(Buffer.from('blerg'))
+
+ let _writevCalled = false
+ let dLength = 0
+ function _writev(d, n) {
+ dLength = d.length
+ _writevCalled = true
+ }
+
+ const w2 = new Writable({ writev: _writev })
+ w2.cork()
+
+ w2.write(Buffer.from('blerg'))
+ w2.write(Buffer.from('blerg'))
+ w2.end()
+
+ setImmediate(function () {
+ t.equal(w._write, _write)
+ t.ok(_writeCalled)
+ t.equal(w2._writev, _writev)
+ t.equal(dLength, 2)
+ t.ok(_writevCalled)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-constructor-set-methods'
diff --git a/src/test/browser/test-stream-writable-decoded-encoding.js b/src/test/browser/test-stream-writable-decoded-encoding.js
new file mode 100644
index 0000000000..628349c9e0
--- /dev/null
+++ b/src/test/browser/test-stream-writable-decoded-encoding.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
+}
+
+inherits(MyWritable, stream.Writable)
+
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+}
+
+module.exports = function (test) {
+ test('decodeStringsTrue', (t) => {
+ t.plan(3)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.ok(isBuffer)
+ t.equal(type, 'object')
+ t.equal(enc, 'buffer')
+ // console.log('ok - decoded string is decoded');
+ },
+ { decodeStrings: true }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+
+ test('decodeStringsFalse', (t) => {
+ t.plan(3)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.notOk(isBuffer)
+ t.equal(type, 'string')
+ t.equal(enc, 'utf8')
+ // console.log('ok - un-decoded string is not decoded');
+ },
+ { decodeStrings: false }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-decoded-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-writev.js b/src/test/browser/test-stream-writev.js
new file mode 100644
index 0000000000..e072bc2388
--- /dev/null
+++ b/src/test/browser/test-stream-writev.js
@@ -0,0 +1,106 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+const queue = []
+for (let decode = 0; decode < 2; decode++) {
+ for (let uncork = 0; uncork < 2; uncork++) {
+ for (let multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi])
+ }
+ }
+}
+
+function runTest(decode, uncork, multi) {
+ return function (t) {
+ t.plan(8)
+
+ // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
+ let counter = 0
+ let expectCount = 0
+ function cnt(msg) {
+ expectCount++
+ const expect = expectCount
+ return function (er) {
+ if (er) {
+ throw er
+ }
+ counter++
+ t.equal(counter, expect)
+ }
+ }
+
+ const w = new stream.Writable({ decodeStrings: decode })
+ w._write = function (chunk, e, cb) {
+ t.ok(false, 'Should not call _write')
+ }
+
+ const expectChunks = decode
+ ? [
+ { encoding: 'buffer', chunk: [104, 101, 108, 108, 111, 44, 32] },
+ { encoding: 'buffer', chunk: [119, 111, 114, 108, 100] },
+ { encoding: 'buffer', chunk: [33] },
+ { encoding: 'buffer', chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] },
+ { encoding: 'buffer', chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] }
+ ]
+ : [
+ { encoding: 'ascii', chunk: 'hello, ' },
+ { encoding: 'utf8', chunk: 'world' },
+ { encoding: 'buffer', chunk: [33] },
+ { encoding: 'binary', chunk: '\nand then...' },
+ { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }
+ ]
+
+ let actualChunks
+ w._writev = function (chunks, cb) {
+ actualChunks = chunks.map(function (chunk) {
+ return {
+ encoding: chunk.encoding,
+ chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ }
+ })
+ cb()
+ }
+
+ w.cork()
+ w.write('hello, ', 'ascii', cnt('hello'))
+ w.write('world', 'utf8', cnt('world'))
+
+ if (multi) {
+ w.cork()
+ }
+
+ w.write(Buffer.from('!'), 'buffer', cnt('!'))
+ w.write('\nand then...', 'binary', cnt('and then'))
+
+ if (multi) {
+ w.uncork()
+ }
+
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'))
+
+ if (uncork) {
+ w.uncork()
+ }
+
+ w.end(cnt('end'))
+
+ w.on('finish', function () {
+ // make sure finish comes after all the write cb
+ cnt('finish')()
+ t.deepEqual(expectChunks, actualChunks)
+ })
+ }
+}
+
+module.exports = function (test) {
+ for (let i = 0; i < queue.length; i++) {
+ const tr = queue[i]
+
+ test('round ' + i, runTest(tr[0], tr[1], tr[2]))
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writev'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-base64-single-char-read-end.js b/src/test/browser/test-stream2-base64-single-char-read-end.js
new file mode 100644
index 0000000000..5b7c131d52
--- /dev/null
+++ b/src/test/browser/test-stream2-base64-single-char-read-end.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const src = new Readable({ encoding: 'base64' })
+ const dst = new Writable()
+ let hasRead = false
+ const accum = []
+
+ src._read = function (n) {
+ if (!hasRead) {
+ hasRead = true
+ process.nextTick(function () {
+ src.push(Buffer.from('1'))
+ src.push(null)
+ })
+ }
+ }
+
+ dst._write = function (chunk, enc, cb) {
+ accum.push(chunk)
+ cb()
+ }
+
+ src.on('end', function () {
+ t.equal(Buffer.concat(accum) + '', 'MQ==')
+ clearTimeout(timeout)
+ })
+
+ src.pipe(dst)
+
+ const timeout = setTimeout(function () {
+ t.fail('timed out waiting for _write')
+ }, 100)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-base64-single-char-read-end'
diff --git a/src/test/browser/test-stream2-compatibility.js b/src/test/browser/test-stream2-compatibility.js
new file mode 100644
index 0000000000..6709029562
--- /dev/null
+++ b/src/test/browser/test-stream2-compatibility.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ let ondataCalled = 0
+
+ function TestReader() {
+ Readable.apply(this)
+ this._buffer = Buffer.alloc(100)
+ this._buffer.fill('x')
+
+ this.on('data', function () {
+ ondataCalled++
+ })
+ }
+
+ inherits(TestReader, Readable)
+
+ TestReader.prototype._read = function (n) {
+ this.push(this._buffer)
+ this._buffer = Buffer.alloc(0)
+ }
+
+ setTimeout(function () {
+ t.equal(ondataCalled, 1)
+ })
+
+ new TestReader().read()
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-compatibility'
diff --git a/src/test/browser/test-stream2-large-read-stall.js b/src/test/browser/test-stream2-large-read-stall.js
new file mode 100644
index 0000000000..17bb7fb2b9
--- /dev/null
+++ b/src/test/browser/test-stream2-large-read-stall.js
@@ -0,0 +1,63 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ // If everything aligns so that you do a read(n) of exactly the
+ // remaining buffer, then make sure that 'end' still emits.
+
+ const READSIZE = 100
+ const PUSHSIZE = 20
+ const PUSHCOUNT = 1000
+ const HWM = 50
+
+ const r = new Readable({
+ highWaterMark: HWM
+ })
+ const rs = r._readableState
+
+ r._read = push
+
+ r.on('readable', function () {
+ false && console.error('>> readable')
+ let ret
+ do {
+ false && console.error(' > read(%d)', READSIZE)
+ ret = r.read(READSIZE)
+ false && console.error(' < %j (%d remain)', ret && ret.length, rs.length)
+ } while (ret && ret.length === READSIZE)
+
+ false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length)
+ })
+
+ r.on('end', function () {
+ t.equal(pushes, PUSHCOUNT + 1)
+
+ false && console.error('end')
+ })
+
+ let pushes = 0
+ function push() {
+ if (pushes > PUSHCOUNT) {
+ return
+ }
+
+ if (pushes++ === PUSHCOUNT) {
+ false && console.error(' push(EOF)')
+ return r.push(null)
+ }
+
+ false && console.error(' push #%d', pushes)
+ if (r.push(Buffer.alloc(PUSHSIZE))) {
+ setTimeout(push)
+ }
+ }
+
+ // start the flow
+ r.read(0)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-large-read-stall'
diff --git a/src/test/browser/test-stream2-objects.js b/src/test/browser/test-stream2-objects.js
new file mode 100644
index 0000000000..c939b07fe6
--- /dev/null
+++ b/src/test/browser/test-stream2-objects.js
@@ -0,0 +1,309 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+}
+
+function toArray(callback) {
+ const stream = new Writable({ objectMode: true })
+ const list = []
+ stream.write = function (chunk) {
+ list.push(chunk)
+ }
+
+ stream.end = function () {
+ callback(list)
+ }
+
+ return stream
+}
+
+function fromArray(list) {
+ const r = new Readable({ objectMode: true })
+ r._read = noop
+ forEach(list, function (chunk) {
+ r.push(chunk)
+ })
+ r.push(null)
+
+ return r
+}
+
+function noop() {}
+
+module.exports = function (test) {
+ test('can read objects from stream', function (t) {
+ t.plan(3)
+
+ const r = fromArray([{ one: '1' }, { two: '2' }])
+
+ const v1 = r.read()
+ const v2 = r.read()
+ const v3 = r.read()
+
+ t.deepEqual(v1, { one: '1' })
+ t.deepEqual(v2, { two: '2' })
+ t.deepEqual(v3, null)
+ })
+
+ test('can pipe objects into stream', function (t) {
+ t.plan(1)
+
+ const r = fromArray([{ one: '1' }, { two: '2' }])
+
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [{ one: '1' }, { two: '2' }])
+ })
+ )
+ })
+
+ test('read(n) is ignored', function (t) {
+ t.plan(1)
+
+ const r = fromArray([{ one: '1' }, { two: '2' }])
+
+ const value = r.read(2)
+
+ t.deepEqual(value, { one: '1' })
+ })
+
+ test('can read objects from _read (sync)', function (t) {
+ t.plan(1)
+
+ const r = new Readable({ objectMode: true })
+ const list = [{ one: '1' }, { two: '2' }]
+ r._read = function (n) {
+ const item = list.shift()
+ r.push(item || null)
+ }
+
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [{ one: '1' }, { two: '2' }])
+ })
+ )
+ })
+
+ test('can read objects from _read (async)', function (t) {
+ t.plan(1)
+
+ const r = new Readable({ objectMode: true })
+ const list = [{ one: '1' }, { two: '2' }]
+ r._read = function (n) {
+ const item = list.shift()
+ process.nextTick(function () {
+ r.push(item || null)
+ })
+ }
+
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [{ one: '1' }, { two: '2' }])
+ })
+ )
+ })
+
+ test('can read strings as objects', function (t) {
+ t.plan(1)
+
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ const list = ['one', 'two', 'three']
+ forEach(list, function (str) {
+ r.push(str)
+ })
+ r.push(null)
+
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, list)
+ })
+ )
+ })
+
+ test('read(0) for object streams', function (t) {
+ t.plan(1)
+
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+
+ r.push('foobar')
+ r.push(null)
+
+ r.read(0)
+
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, ['foobar'])
+ })
+ )
+ })
+
+ test('falsey values', function (t) {
+ t.plan(1)
+
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+
+ r.push(false)
+ r.push(0)
+ r.push('')
+ r.push(null)
+
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, [false, 0, ''])
+ })
+ )
+ })
+
+ test('high watermark _read', function (t) {
+ t.plan(5)
+
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ let calls = 0
+ const list = ['1', '2', '3', '4', '5', '6', '7', '8']
+
+ r._read = function (n) {
+ calls++
+ }
+
+ forEach(list, function (c) {
+ r.push(c)
+ })
+
+ const v = r.read()
+
+ t.equal(calls, 0)
+ t.equal(v, '1')
+
+ const v2 = r.read()
+ t.equal(v2, '2')
+
+ const v3 = r.read()
+ t.equal(v3, '3')
+
+ t.equal(calls, 1)
+ })
+
+ test('high watermark push', function (t) {
+ t.plan(6)
+
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ r._read = function (n) {}
+ for (let i = 0; i < 6; i++) {
+ const bool = r.push(i)
+ t.equal(bool, i !== 5)
+ }
+ })
+
+ test('can write objects to stream', function (t) {
+ t.plan(1)
+
+ const w = new Writable({ objectMode: true })
+
+ w._write = function (chunk, encoding, cb) {
+ t.deepEqual(chunk, { foo: 'bar' })
+ cb()
+ }
+
+ w.on('finish', function () {})
+
+ w.write({ foo: 'bar' })
+ w.end()
+ })
+
+ test('can write multiple objects to stream', function (t) {
+ t.plan(1)
+
+ const w = new Writable({ objectMode: true })
+ const list = []
+
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ cb()
+ }
+
+ w.on('finish', function () {
+ t.deepEqual(list, [0, 1, 2, 3, 4])
+ })
+
+ w.write(0)
+ w.write(1)
+ w.write(2)
+ w.write(3)
+ w.write(4)
+ w.end()
+ })
+
+ test('can write strings as objects', function (t) {
+ t.plan(1)
+
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
+
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ process.nextTick(cb)
+ }
+
+ w.on('finish', function () {
+ t.deepEqual(list, ['0', '1', '2', '3', '4'])
+ })
+
+ w.write('0')
+ w.write('1')
+ w.write('2')
+ w.write('3')
+ w.write('4')
+ w.end()
+ })
+
+ test('buffers finish until cb is called', function (t) {
+ t.plan(2)
+
+ const w = new Writable({
+ objectMode: true
+ })
+ let called = false
+
+ w._write = function (chunk, encoding, cb) {
+ t.equal(chunk, 'foo')
+
+ process.nextTick(function () {
+ called = true
+ cb()
+ })
+ }
+
+ w.on('finish', function () {
+ t.equal(called, true)
+ })
+
+ w.write('foo')
+ w.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-objects'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-pipe-error-handling.js b/src/test/browser/test-stream2-pipe-error-handling.js
new file mode 100644
index 0000000000..e830b9b415
--- /dev/null
+++ b/src/test/browser/test-stream2-pipe-error-handling.js
@@ -0,0 +1,95 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('Error Listener Catches', function (t) {
+ t.plan(3)
+
+ let count = 1000
+
+ const source = new stream.Readable()
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.alloc(n))
+ }
+
+ let unpipedDest
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+
+ const dest = new stream.Writable()
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+
+ source.pipe(dest)
+
+ let gotErr = null
+ dest.on('error', function (err) {
+ gotErr = err
+ })
+
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+
+ const err = new Error('This stream turned into bacon.')
+ dest.emit('error', err)
+ t.strictEqual(gotErr, err)
+ t.strictEqual(unpipedSource, source)
+ t.strictEqual(unpipedDest, dest)
+ })
+
+ test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) {
+ t.plan(3)
+
+ let count = 1000
+
+ const source = new stream.Readable()
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.alloc(n))
+ }
+
+ let unpipedDest
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+
+ const dest = new stream.Writable()
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+
+ source.pipe(dest)
+
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+
+ const err = new Error('This stream turned into bacon.')
+ const onerror = global.onerror
+
+ dest.emit('error', err)
+
+ global.onerror = () => {
+ t.ok(true)
+ t.strictEqual(unpipedSource, source)
+ t.strictEqual(unpipedDest, dest)
+ global.onerror = onerror
+ return true
+ }
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-pipe-error-once-listener.js b/src/test/browser/test-stream2-pipe-error-once-listener.js
new file mode 100644
index 0000000000..230da9ad42
--- /dev/null
+++ b/src/test/browser/test-stream2-pipe-error-once-listener.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const Read = function () {
+ stream.Readable.call(this)
+ }
+ inherits(Read, stream.Readable)
+
+ Read.prototype._read = function (size) {
+ this.push('x')
+ this.push(null)
+ }
+
+ const Write = function () {
+ stream.Writable.call(this)
+ }
+ inherits(Write, stream.Writable)
+
+ Write.prototype._write = function (buffer, encoding, cb) {
+ this.emit('error', new Error('boom'))
+ this.emit('alldone')
+ }
+
+ const read = new Read()
+ const write = new Write()
+
+ write.once('error', () => {})
+ write.once('alldone', function () {
+ t.ok(true)
+ })
+
+ read.pipe(write)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-once-listener'
diff --git a/src/test/browser/test-stream2-push.js b/src/test/browser/test-stream2-push.js
new file mode 100644
index 0000000000..ce2916aaa1
--- /dev/null
+++ b/src/test/browser/test-stream2-push.js
@@ -0,0 +1,119 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(33)
+
+ const stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+ })
+
+ const source = new EE()
+
+ stream._read = function () {
+ // console.error('stream._read');
+ readStart()
+ }
+
+ let ended = false
+ stream.on('end', function () {
+ ended = true
+ })
+
+ source.on('data', function (chunk) {
+ const ret = stream.push(chunk)
+ // console.error('data', stream._readableState.length);
+ if (!ret) {
+ readStop()
+ }
+ })
+
+ source.on('end', function () {
+ stream.push(null)
+ })
+
+ let reading = false
+
+ function readStart() {
+ // console.error('readStart');
+ reading = true
+ }
+
+ function readStop() {
+ // console.error('readStop');
+ reading = false
+ process.nextTick(function () {
+ const r = stream.read()
+ if (r !== null) {
+ writer.write(r)
+ }
+ })
+ }
+
+ const writer = new Writable({
+ decodeStrings: false
+ })
+
+ const written = []
+
+ const expectWritten = [
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg'
+ ]
+
+ writer._write = function (chunk, encoding, cb) {
+ // console.error('WRITE %s', chunk);
+ written.push(chunk)
+ process.nextTick(cb)
+ }
+
+ writer.on('finish', finish)
+
+ // now emit some chunks.
+
+ const chunk = 'asdfg'
+
+ let set = 0
+ readStart()
+ data()
+ function data() {
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.notOk(reading)
+ if (set++ < 5) {
+ setTimeout(data, 10)
+ } else {
+ end()
+ }
+ }
+
+ function finish() {
+ // console.error('finish');
+ t.deepEqual(written, expectWritten)
+ }
+
+ function end() {
+ source.emit('end')
+ t.notOk(reading)
+ writer.end(stream.read())
+ setTimeout(function () {
+ t.ok(ended)
+ })
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-push'
diff --git a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js
new file mode 100644
index 0000000000..35e27a2f2f
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js
@@ -0,0 +1,98 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('readable empty buffer no eof 1', function (t) {
+ t.plan(1)
+
+ const r = new Readable()
+
+ // should not end when we get a Buffer(0) or '' as the _read result
+ // that just means that there is *temporarily* no data, but to go
+ // ahead and try again later.
+ //
+ // note that this is very unusual. it only works for crypto streams
+ // because the other side of the stream will call read(0) to cycle
+ // data through openssl. that's why we set the timeouts to call
+ // r.read(0) again later, otherwise there is no more work being done
+ // and the process just exits.
+
+ const buf = Buffer.alloc(5)
+ buf.fill('x')
+ let reads = 5
+ r._read = function (n) {
+ switch (reads--) {
+ case 0:
+ return r.push(null) // EOF
+ case 1:
+ return r.push(buf)
+ case 2:
+ setTimeout(r.read.bind(r, 0), 50)
+ return r.push(Buffer.alloc(0)) // Not-EOF!
+ case 3:
+ setTimeout(r.read.bind(r, 0), 50)
+ return process.nextTick(function () {
+ return r.push(Buffer.alloc(0))
+ })
+ case 4:
+ setTimeout(r.read.bind(r, 0), 50)
+ return setTimeout(function () {
+ return r.push(Buffer.alloc(0))
+ })
+ case 5:
+ return setTimeout(function () {
+ return r.push(buf)
+ })
+ default:
+ throw new Error('unreachable')
+ }
+ }
+
+ const results = []
+ function flow() {
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
+ }
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF'])
+ })
+ flow()
+ })
+
+ test('readable empty buffer no eof 2', function (t) {
+ t.plan(1)
+
+ const r = new Readable({ encoding: 'base64' })
+ let reads = 5
+ r._read = function (n) {
+ if (!reads--) {
+ return r.push(null) // EOF
+ } else {
+ return r.push(Buffer.from('x'))
+ }
+ }
+
+ const results = []
+ function flow() {
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
+ }
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['eHh4', 'eHg=', 'EOF'])
+ })
+ flow()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-empty-buffer-no-eof'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-readable-from-list.js b/src/test/browser/test-stream2-readable-from-list.js
new file mode 100644
index 0000000000..f71984e9c4
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-from-list.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const { _fromList: fromList } = require('../../lib/_stream_readable')
+const BufferList = require('../../lib/internal/streams/buffer_list')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+function bufferListFromArray(arr) {
+ const bl = new BufferList()
+ for (let i = 0; i < arr.length; ++i) {
+ bl.push(arr[i])
+ }
+ return bl
+}
+
+module.exports = function (test) {
+ test('buffers', function (t) {
+ t.plan(5)
+
+ let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')]
+ list = bufferListFromArray(list)
+
+ // read more than the first element.
+ let ret = fromList(6, { buffer: list, length: 16 })
+ t.equal(ret.toString(), 'foogba')
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10 })
+ t.equal(ret.toString(), 'rk')
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8 })
+ t.equal(ret.toString(), 'ba')
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6 })
+ t.equal(ret.toString(), 'zykuel')
+
+ // all consumed.
+ t.same(list, new BufferList())
+ })
+
+ test('strings', function (t) {
+ t.plan(5)
+
+ let list = ['foog', 'bark', 'bazy', 'kuel']
+ list = bufferListFromArray(list)
+
+ // read more than the first element.
+ let ret = fromList(6, { buffer: list, length: 16, decoder: true })
+ t.equal(ret, 'foogba')
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10, decoder: true })
+ t.equal(ret, 'rk')
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8, decoder: true })
+ t.equal(ret, 'ba')
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6, decoder: true })
+ t.equal(ret, 'zykuel')
+
+ // all consumed.
+ t.same(list, new BufferList())
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-from-list'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-readable-legacy-drain.js b/src/test/browser/test-stream2-readable-legacy-drain.js
new file mode 100644
index 0000000000..8cd09c2fc6
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-legacy-drain.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const { Stream, Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(3)
+
+ const r = new Readable()
+ const N = 256
+ let reads = 0
+ r._read = function (n) {
+ return r.push(++reads === N ? null : Buffer.alloc(1))
+ }
+
+ r.on('end', function () {
+ t.ok(true, 'rended')
+ })
+
+ const w = new Stream()
+ w.writable = true
+ let writes = 0
+ let buffered = 0
+ w.write = function (c) {
+ writes += c.length
+ buffered += c.length
+ process.nextTick(drain)
+ return false
+ }
+
+ function drain() {
+ if (buffered > 3) {
+      t.ok(false, 'too much buffer')
+ }
+ buffered = 0
+ w.emit('drain')
+ }
+
+ w.end = function () {
+ t.equal(writes, 255)
+ t.ok(true, 'wended')
+ }
+
+ r.pipe(w)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-legacy-drain'
diff --git a/src/test/browser/test-stream2-readable-non-empty-end.js b/src/test/browser/test-stream2-readable-non-empty-end.js
new file mode 100644
index 0000000000..f9e2983142
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-non-empty-end.js
@@ -0,0 +1,60 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(4)
+
+ let len = 0
+ const chunks = new Array(10)
+ for (let i = 1; i <= 10; i++) {
+ chunks[i - 1] = Buffer.alloc(i)
+ len += i
+ }
+
+ const test = new Readable()
+ let n = 0
+ test._read = function (size) {
+ const chunk = chunks[n++]
+ setTimeout(function () {
+ test.push(chunk === undefined ? null : chunk)
+ })
+ }
+
+ test.on('end', thrower)
+ function thrower() {
+ throw new Error('this should not happen!')
+ }
+
+ let bytesread = 0
+ test.on('readable', function () {
+ const b = len - bytesread - 1
+ const res = test.read(b)
+ if (res) {
+ bytesread += res.length
+ // console.error('br=%d len=%d', bytesread, len);
+ setTimeout(next)
+ }
+ test.read(0)
+ })
+ test.read(0)
+
+ function next() {
+ // now let's make 'end' happen
+ test.removeListener('end', thrower)
+
+ test.on('end', function () {
+ t.ok(true, 'end emitted')
+ })
+
+ // one to get the last byte
+ let r = test.read()
+ t.ok(r)
+ t.equal(r.length, 1)
+ r = test.read()
+ t.equal(r, null)
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-non-empty-end'
diff --git a/src/test/browser/test-stream2-readable-wrap-empty.js b/src/test/browser/test-stream2-readable-wrap-empty.js
new file mode 100644
index 0000000000..7779ac91af
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-wrap-empty.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const Readable = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const oldStream = new EE()
+ oldStream.pause = function () {}
+ oldStream.resume = function () {}
+
+ const newStream = new Readable().wrap(oldStream)
+
+ newStream
+ .on('readable', function () {})
+ .on('end', function () {
+ t.ok(true, 'ended')
+ })
+
+ oldStream.emit('end')
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap-empty'
diff --git a/src/test/browser/test-stream2-readable-wrap.js b/src/test/browser/test-stream2-readable-wrap.js
new file mode 100644
index 0000000000..0dff5fb8f3
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-wrap.js
@@ -0,0 +1,99 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+let run = 0
+
+module.exports = function (test) {
+ function runTest(highWaterMark, objectMode, produce) {
+ test('run #' + ++run, (t) => {
+ t.plan(4)
+
+ const old = new EE()
+ const r = new Readable({ highWaterMark, objectMode })
+ t.equal(r, r.wrap(old))
+
+ let ended = false
+ r.on('end', function () {
+ ended = true
+ })
+
+ old.pause = function () {
+ // console.error('old.pause()');
+ old.emit('pause')
+ flowing = false
+ }
+
+ old.resume = function () {
+ // console.error('old.resume()');
+ old.emit('resume')
+ flow()
+ }
+
+ let flowing
+ let chunks = 10
+ let oldEnded = false
+ const expected = []
+ function flow() {
+ flowing = true
+ // eslint-disable-next-line no-unmodified-loop-condition
+ while (flowing && chunks-- > 0) {
+ const item = produce()
+ expected.push(item)
+ // console.log('old.emit', chunks, flowing);
+ old.emit('data', item)
+ // console.log('after emit', chunks, flowing);
+ }
+ if (chunks <= 0) {
+ oldEnded = true
+ // console.log('old end', chunks, flowing);
+ old.emit('end')
+ }
+ }
+
+ const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode })
+ const written = []
+ w._write = function (chunk, encoding, cb) {
+ // console.log('_write', chunk);
+ written.push(chunk)
+ setTimeout(cb)
+ }
+
+ w.on('finish', function () {
+ performAsserts()
+ })
+
+ r.pipe(w)
+
+ flow()
+
+ function performAsserts() {
+ t.ok(ended)
+ t.ok(oldEnded)
+ t.deepEqual(written, expected)
+ }
+ })
+ }
+
+ runTest(100, false, function () {
+ return Buffer.alloc(100)
+ })
+
+ runTest(10, false, function () {
+ return Buffer.from('xxxxxxxxxx')
+ })
+
+ runTest(1, true, function () {
+ return { foo: 'bar' }
+ })
+
+ const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555]
+ runTest(1, true, function () {
+ return objectChunks.shift()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-set-encoding.js b/src/test/browser/test-stream2-set-encoding.js
new file mode 100644
index 0000000000..3e092e2408
--- /dev/null
+++ b/src/test/browser/test-stream2-set-encoding.js
@@ -0,0 +1,340 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(TestReader, Readable)
+
+function TestReader(n, opts) {
+ Readable.call(this, opts)
+
+ this.pos = 0
+ this.len = n || 100
+}
+
+TestReader.prototype._read = function (n) {
+ setTimeout(
+ function () {
+ if (this.pos >= this.len) {
+ // double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+
+ n = Math.min(n, this.len - this.pos)
+ if (n <= 0) {
+ // double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+
+ this.pos += n
+ const ret = Buffer.alloc(n)
+ ret.fill('a')
+
+ // console.log('this.push(ret)', ret);
+
+ return this.push(ret)
+ }.bind(this),
+ 1
+ )
+}
+
+module.exports = function (test) {
+ test('setEncoding utf8', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('utf8')
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('setEncoding hex', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('setEncoding hex with read(13)', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+
+ tr.on('readable', function flow() {
+ // console.log('readable once');
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ // console.log('END');
+ t.same(out, expect)
+ })
+ })
+
+ test('setEncoding base64', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('base64')
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: utf8', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'utf8' })
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: hex', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'hex' })
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: hex with read(13)', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'hex' })
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: base64', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'base64' })
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('chainable', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ t.equal(tr.setEncoding('utf8'), tr)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-set-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-transform.js b/src/test/browser/test-stream2-transform.js
new file mode 100644
index 0000000000..e0168307bc
--- /dev/null
+++ b/src/test/browser/test-stream2-transform.js
@@ -0,0 +1,489 @@
+'use strict'
+
+const { PassThrough, Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('writable side consumption', function (t) {
+ t.plan(3)
+
+ const tx = new Transform({
+ highWaterMark: 10
+ })
+
+ let transformed = 0
+ tx._transform = function (chunk, encoding, cb) {
+ transformed += chunk.length
+ tx.push(chunk)
+ cb()
+ }
+
+ for (let i = 1; i <= 10; i++) {
+ tx.write(Buffer.alloc(i))
+ }
+ tx.end()
+
+ t.equal(tx._readableState.length, 10)
+ t.equal(transformed, 10)
+ t.same(
+ tx._writableState.getBuffer().map(function (c) {
+ return c.chunk.length
+ }),
+ [5, 6, 7, 8, 9, 10]
+ )
+ })
+
+ test('passthrough', function (t) {
+ t.plan(4)
+
+ const pt = new PassThrough()
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+
+ test('object passthrough', function (t) {
+ t.plan(7)
+
+ const pt = new PassThrough({ objectMode: true })
+
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({ a: 'b' })
+ pt.end()
+
+ t.equal(pt.read(), 1)
+ t.equal(pt.read(), true)
+ t.equal(pt.read(), false)
+ t.equal(pt.read(), 0)
+ t.equal(pt.read(), 'foo')
+ t.equal(pt.read(), '')
+ t.same(pt.read(), { a: 'b' })
+ })
+
+ test('simple transform', function (t) {
+ t.plan(4)
+
+ const pt = new Transform()
+ pt._transform = function (c, e, cb) {
+ const ret = Buffer.alloc(c.length)
+ ret.fill('x')
+ pt.push(ret)
+ cb()
+ }
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'x')
+ })
+
+ test('simple object transform', function (t) {
+ t.plan(7)
+
+ const pt = new Transform({ objectMode: true })
+ pt._transform = function (c, e, cb) {
+ pt.push(JSON.stringify(c))
+ cb()
+ }
+
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({ a: 'b' })
+ pt.end()
+
+ t.equal(pt.read(), '1')
+ t.equal(pt.read(), 'true')
+ t.equal(pt.read(), 'false')
+ t.equal(pt.read(), '0')
+ t.equal(pt.read(), '"foo"')
+ t.equal(pt.read(), '""')
+ t.equal(pt.read(), '{"a":"b"}')
+ })
+
+ test('async passthrough', function (t) {
+ t.plan(4)
+
+ const pt = new Transform()
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+ })
+
+ test('assymetric transform (expand)', function (t) {
+ t.plan(7)
+
+ const pt = new Transform()
+
+ // emit each chunk 2 times.
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }, 10)
+ }
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogf')
+ t.equal(pt.read(5).toString(), 'oogba')
+ t.equal(pt.read(5).toString(), 'rkbar')
+ t.equal(pt.read(5).toString(), 'kbazy')
+ t.equal(pt.read(5).toString(), 'bazyk')
+ t.equal(pt.read(5).toString(), 'uelku')
+ t.equal(pt.read(5).toString(), 'el')
+ })
+ })
+
+ test('assymetric transform (compress)', function (t) {
+ t.plan(3)
+
+ const pt = new Transform()
+
+ // each output is the first char of 3 consecutive chunks,
+ // or whatever's left.
+ pt.state = ''
+
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) {
+ chunk = ''
+ }
+ const s = chunk.toString()
+ setTimeout(
+ function () {
+ this.state += s.charAt(0)
+ if (this.state.length === 3) {
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ }
+ cb()
+ }.bind(this),
+ 10
+ )
+ }
+
+ pt._flush = function (cb) {
+ // just output whatever we have.
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ cb()
+ }
+
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.end()
+
+ // 'abcdeabcdeabcd'
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcd')
+ })
+ })
+
+ // this tests for a stall when data is written to a full stream
+ // that has empty transforms.
+ test('complex transform', function (t) {
+ t.plan(2)
+
+ let count = 0
+ let saved = null
+ const pt = new Transform({ highWaterMark: 3 })
+ pt._transform = function (c, e, cb) {
+ if (count++ === 1) {
+ saved = c
+ } else {
+ if (saved) {
+ pt.push(saved)
+ saved = null
+ }
+ pt.push(c)
+ }
+
+ cb()
+ }
+
+ pt.once('readable', function () {
+ process.nextTick(function () {
+ pt.write(Buffer.from('d'))
+ pt.write(Buffer.from('ef'), function () {
+ pt.end()
+ })
+ t.equal(pt.read().toString(), 'abcdef')
+ t.equal(pt.read(), null)
+ })
+ })
+
+ pt.write(Buffer.from('abc'))
+ })
+
+ test('passthrough event emission', function (t) {
+ t.plan(11)
+
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('>>> emit readable %d', emits);
+ emits++
+ })
+
+ pt.write(Buffer.from('foog'))
+
+ // console.error('need emit 0');
+ pt.write(Buffer.from('bark'))
+
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits)
+ t.equal(emits, 1)
+
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5) + '', 'null')
+
+ // console.error('need emit 1');
+
+ pt.write(Buffer.from('bazy'))
+ // console.error('should have emitted, but not again');
+ pt.write(Buffer.from('kuel'))
+
+ // console.error('should have emitted readable now 2 === %d', emits);
+ setTimeout(() => {
+ t.equal(emits, 2)
+
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 2');
+
+ pt.end()
+
+ setTimeout(() => {
+ t.equal(emits, 3)
+
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+
+ // console.error('should not have emitted again');
+ t.equal(emits, 3)
+ })
+ })
+ })
+ })
+
+ test('passthrough event emission reordered', function (t) {
+ t.plan(10)
+
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('emit readable', emits);
+ emits++
+ })
+
+ pt.write(Buffer.from('foog'))
+ // console.error('need emit 0');
+ pt.write(Buffer.from('bark'))
+
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1)
+
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 1');
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'arkba')
+
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 2');
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+ t.equal(emits, 4)
+ })
+ pt.end()
+ })
+ pt.write(Buffer.from('kuel'))
+ })
+
+ pt.write(Buffer.from('bazy'))
+ })
+ })
+
+ test('passthrough facaded', function (t) {
+ t.plan(1)
+
+ // console.error('passthrough facaded');
+ const pt = new PassThrough()
+ const datas = []
+ pt.on('data', function (chunk) {
+ datas.push(chunk.toString())
+ })
+
+ pt.on('end', function () {
+ t.same(datas, ['foog', 'bark', 'bazy', 'kuel'])
+ })
+
+ pt.write(Buffer.from('foog'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bark'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bazy'))
+ setTimeout(function () {
+ pt.write(Buffer.from('kuel'))
+ setTimeout(function () {
+ pt.end()
+ }, 10)
+ }, 10)
+ }, 10)
+ }, 10)
+ })
+
+ test('object transform (json parse)', function (t) {
+ t.plan(5)
+
+ // console.error('json parse stream');
+ const jp = new Transform({ objectMode: true })
+ jp._transform = function (data, encoding, cb) {
+ try {
+ jp.push(JSON.parse(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }]
+
+ let ended = false
+ jp.on('end', function () {
+ ended = true
+ })
+
+ forEach(objects, function (obj) {
+ jp.write(JSON.stringify(obj))
+ const res = jp.read()
+ t.same(res, obj)
+ })
+
+ jp.end()
+ // read one more time to get the 'end' event
+ jp.read()
+
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+
+ test('object transform (json stringify)', function (t) {
+ t.plan(5)
+
+ // console.error('json parse stream');
+ const js = new Transform({ objectMode: true })
+ js._transform = function (data, encoding, cb) {
+ try {
+ js.push(JSON.stringify(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }]
+
+ let ended = false
+ js.on('end', function () {
+ ended = true
+ })
+
+ forEach(objects, function (obj) {
+ js.write(obj)
+ const res = js.read()
+ t.equal(res, JSON.stringify(obj))
+ })
+
+ js.end()
+ // read one more time to get the 'end' event
+ js.read()
+
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-transform'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-unpipe-drain.js b/src/test/browser/test-stream2-unpipe-drain.js
new file mode 100644
index 0000000000..7d3192eec1
--- /dev/null
+++ b/src/test/browser/test-stream2-unpipe-drain.js
@@ -0,0 +1,65 @@
+'use strict'
+
+const crypto = require('crypto')
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ try {
+ crypto.randomBytes(9)
+ } catch (_) {
+ t.plan(1)
+ t.ok(true, 'does not support random, skipping')
+ return
+ }
+
+ t.plan(2)
+
+ function TestWriter() {
+ stream.Writable.call(this)
+ }
+ inherits(TestWriter, stream.Writable)
+
+ TestWriter.prototype._write = function (buffer, encoding, callback) {
+ // console.log('write called');
+ // super slow write stream (callback never called)
+ }
+
+ const dest = new TestWriter()
+
+ function TestReader(id) {
+ stream.Readable.call(this)
+ this.reads = 0
+ }
+ inherits(TestReader, stream.Readable)
+
+ TestReader.prototype._read = function (size) {
+ this.reads += 1
+ this.push(crypto.randomBytes(size))
+ }
+
+ const src1 = new TestReader()
+ const src2 = new TestReader()
+
+ src1.pipe(dest)
+
+ src1.once('readable', function () {
+ process.nextTick(function () {
+ src2.pipe(dest)
+
+ src2.once('readable', function () {
+ process.nextTick(function () {
+ src1.unpipe(dest)
+ })
+ })
+ })
+ })
+
+ dest.on('unpipe', function () {
+ t.equal(src1.reads, 2)
+ t.equal(src2.reads, 1)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-unpipe-drain'
diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js
new file mode 100644
index 0000000000..20d4e9dc5e
--- /dev/null
+++ b/src/test/browser/test-stream2-writable.js
@@ -0,0 +1,445 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Duplex, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(TestWriter, Writable)
+
+function TestWriter() {
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
+}
+
+TestWriter.prototype._write = function (chunk, encoding, cb) {
+ // simulate a small unpredictable latency
+ setTimeout(
+ function () {
+ this.buffer.push(chunk.toString())
+ this.written += chunk.length
+ cb()
+ }.bind(this),
+ Math.floor(Math.random() * 10)
+ )
+}
+inherits(Processstdout, Writable)
+
+function Processstdout() {
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
+}
+
+Processstdout.prototype._write = function (chunk, encoding, cb) {
+ // console.log(chunk.toString());
+ cb()
+}
+const chunks = new Array(50)
+for (let i = 0; i < chunks.length; i++) {
+ chunks[i] = new Array(i + 1).join('x')
+}
+
+if (!process.stdout) {
+ process.stdout = new Processstdout()
+}
+
+module.exports = function (test) {
+ test('write fast', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+
+ forEach(chunks, function (chunk) {
+ // screw backpressure. Just buffer it all up.
+ tw.write(chunk)
+ })
+ tw.end()
+ })
+
+ test('write slow', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+
+ let i = 0
+ ;(function W() {
+ tw.write(chunks[i++])
+ if (i < chunks.length) {
+ setTimeout(W, 10)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+
+ test('write backpressure', function (t) {
+ t.plan(19)
+
+ const tw = new TestWriter({
+ highWaterMark: 50
+ })
+
+ let drains = 0
+
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.equal(drains, 17)
+ })
+
+ tw.on('drain', function () {
+ drains++
+ })
+
+ let i = 0
+ ;(function W() {
+ let ret
+ do {
+ ret = tw.write(chunks[i++])
+ } while (ret !== false && i < chunks.length)
+
+ if (i < chunks.length) {
+ t.ok(tw._writableState.length >= 50)
+ tw.once('drain', W)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+
+ test('write bufferize', function (t) {
+ t.plan(50)
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk)
+
+ // Some combination of encoding and length result in the last byte replaced by two extra null bytes
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ }
+
+ // In some cases instead there is one byte less
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+
+ test('write no bufferize', function (t) {
+ t.plan(100)
+
+ const tw = new TestWriter({
+ highWaterMark: 100,
+ decodeStrings: false
+ })
+
+ tw._write = function (chunk, encoding, cb) {
+ t.equals(typeof chunk, 'string')
+ chunk = Buffer.from(chunk, encoding)
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb)
+ }
+
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk)
+
+ // Some combination of encoding and length result in the last byte replaced by two extra null bytes
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ }
+
+ // In some cases instead there is one byte less
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+
+ test('write callbacks', function (t) {
+ t.plan(2)
+
+ const callbacks = chunks
+ .map(function (chunk, i) {
+ return [
+ i,
+ function (er) {
+ callbacks._called[i] = chunk
+ }
+ ]
+ })
+ .reduce(function (set, x) {
+ set['callback-' + x[0]] = x[1]
+ return set
+ }, {})
+ callbacks._called = []
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ tw.on('finish', function () {
+ process.nextTick(function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.same(callbacks._called, chunks, 'called all callbacks')
+ })
+ })
+
+ forEach(chunks, function (chunk, i) {
+ tw.write(chunk, callbacks['callback-' + i])
+ })
+ tw.end()
+ })
+
+ test('end callback', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback with chunk', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.end(Buffer.from('hello world'), () => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback with chunk and encoding', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.end('hello world', 'ascii', () => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback after .write() call', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.write(Buffer.from('hello world'))
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback called after write callback', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ let writeCalledback = false
+ tw.write(Buffer.from('hello world'), function () {
+ writeCalledback = true
+ })
+ tw.end(function () {
+ t.equal(writeCalledback, true)
+ })
+ })
+
+ test('encoding should be ignored for buffers', function (t) {
+ t.plan(1)
+
+ const tw = new Writable()
+ const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'
+ tw._write = function (chunk, encoding, cb) {
+ t.equal(chunk.toString('hex'), hex)
+ }
+ const buf = Buffer.from(hex, 'hex')
+ tw.write(buf, 'binary')
+ })
+
+ test('writables are not pipable', function (t) {
+ t.plan(1)
+
+ const w = new Writable({ autoDestroy: false })
+ w._write = function () {}
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ })
+ w.pipe(process.stdout)
+ t.ok(gotError)
+ })
+
+ test('duplexes are pipable', function (t) {
+ t.plan(1)
+
+ const d = new Duplex()
+ d._read = function () {}
+ d._write = function () {}
+ let gotError = false
+ d.on('error', function (er) {
+ gotError = true
+ })
+ d.pipe(process.stdout)
+ t.notOk(gotError)
+ })
+
+ test('end(chunk) two times is an error', function (t) {
+ t.plan(2)
+
+ const w = new Writable()
+ w._write = function () {}
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ t.equal(er.message, 'write after end')
+ })
+ w.end('this is the end')
+ w.end('and so is this')
+ process.nextTick(function () {
+ t.ok(gotError)
+ })
+ })
+
+ test('dont end while writing', function (t) {
+ t.plan(2)
+
+ const w = new Writable()
+ let wrote = false
+ w._write = function (chunk, e, cb) {
+ t.notOk(this.writing)
+ wrote = true
+ this.writing = true
+ setTimeout(function () {
+ this.writing = false
+ cb()
+ })
+ }
+ w.on('finish', function () {
+ t.ok(wrote)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+
+ test('finish does not come before write cb', function (t) {
+ t.plan(1)
+
+ const w = new Writable()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ setTimeout(function () {
+ writeCb = true
+ cb()
+ }, 10)
+ }
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+
+ test('finish does not come before sync _write cb', function (t) {
+ t.plan(1)
+
+ const w = new Writable()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ cb()
+ }
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0), function (er) {
+ writeCb = true
+ })
+ w.end()
+ })
+
+ test('finish is emitted if last chunk is empty', function (t) {
+ t.plan(1)
+
+ const w = new Writable()
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+ w.on('finish', () => {
+ t.ok(true)
+ })
+
+ w.write(Buffer.alloc(1))
+ w.end(Buffer.alloc(0))
+ })
+
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-writable'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream3-pause-then-read.js b/src/test/browser/test-stream3-pause-then-read.js
new file mode 100644
index 0000000000..6b4399f656
--- /dev/null
+++ b/src/test/browser/test-stream3-pause-then-read.js
@@ -0,0 +1,149 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(7)
+
+ const totalChunks = 100
+ const chunkSize = 99
+ const expectTotalData = totalChunks * chunkSize
+ let expectEndingData = expectTotalData
+
+ const r = new Readable({ highWaterMark: 1000 })
+ let chunks = totalChunks
+ r._read = function (n) {
+ if (!(chunks % 2)) {
+ setImmediate(push)
+ } else if (!(chunks % 3)) {
+ process.nextTick(push)
+ } else {
+ push()
+ }
+ }
+
+ let totalPushed = 0
+ function push() {
+ const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null
+ if (chunk) {
+ totalPushed += chunk.length
+ chunk.fill('x')
+ }
+ r.push(chunk)
+ }
+
+ read100()
+
+ // first we read 100 bytes
+ function read100() {
+ readn(100, onData)
+ }
+
+ function readn(n, then) {
+ // console.error('read %d', n);
+ expectEndingData -= n
+ ;(function read() {
+ const c = r.read(n)
+ if (!c) {
+ r.once('readable', read)
+ } else {
+ t.equal(c.length, n)
+ t.notOk(r._readableState.flowing)
+ then()
+ }
+ })()
+ }
+
+ // then we listen to some data events
+ function onData() {
+ expectEndingData -= 100
+ // console.error('onData');
+ let seen = 0
+ r.on('data', function od(c) {
+ seen += c.length
+ if (seen >= 100) {
+ // seen enough
+ r.removeListener('data', od)
+ r.pause()
+ if (seen > 100) {
+ // oh no, seen too much!
+ // put the extra back.
+ const diff = seen - 100
+ r.unshift(c.slice(c.length - diff))
+ // console.error('seen too much', seen, diff)
+ }
+
+ // Nothing should be lost in between
+ setImmediate(pipeLittle)
+ }
+ })
+ }
+
+ // Just pipe 200 bytes, then unshift the extra and unpipe
+ function pipeLittle() {
+ expectEndingData -= 200
+ // console.error('pipe a little');
+ const w = new Writable()
+ let written = 0
+ w.on('finish', function () {
+ t.equal(written, 200)
+ setImmediate(read1234)
+ })
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ if (written >= 200) {
+ r.unpipe(w)
+ w.end()
+ cb()
+ if (written > 200) {
+ const diff = written - 200
+ written -= diff
+ r.unshift(chunk.slice(chunk.length - diff))
+ }
+ } else {
+ setImmediate(cb)
+ }
+ }
+ r.pipe(w)
+ }
+
+ // now read 1234 more bytes
+ function read1234() {
+ readn(1234, resumePause)
+ }
+
+ function resumePause() {
+ // console.error('resumePause');
+ // don't read anything, just resume and re-pause a whole bunch
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ setImmediate(pipe)
+ }
+
+ function pipe() {
+ // console.error('pipe the rest');
+ const w = new Writable()
+ let written = 0
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ cb()
+ }
+ w.on('finish', function () {
+ // console.error('written', written, totalPushed);
+ t.equal(written, expectEndingData)
+ t.equal(totalPushed, expectTotalData)
+ })
+ r.pipe(w)
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream3-pause-then-read'
diff --git a/test/ours/errors.js b/src/test/ours/test-errors.js
similarity index 53%
rename from test/ours/errors.js
rename to src/test/ours/test-errors.js
index fc32605923..a300f17075 100644
--- a/test/ours/errors.js
+++ b/src/test/ours/test-errors.js
@@ -1,148 +1,132 @@
-var tap = require('tap');
-var assert = require('assert');
-var errors = require('../../errors').codes;
-
-function expect (err, Base, name, code, message) {
- assert(err instanceof Base);
- assert.strictEqual(err.name, name);
- assert.strictEqual(err.code, code);
- assert.strictEqual(err.message, message);
+'use strict'
+
+const t = require('tap')
+const { codes: errors } = require('../../lib/ours/errors')
+
+function checkError(err, Base, name, code, message) {
+ t.ok(err instanceof Base)
+ t.equal(err.name, name)
+ t.equal(err.code, code)
+ t.equal(err.message, message)
}
-expect(
- new errors.ERR_INVALID_OPT_VALUE('name', 0),
+// Update these numbers based on the number of checkError calls below multiplied by the assertions within checkError
+t.plan(17 * 4)
+
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', 0),
TypeError,
'TypeError',
- 'ERR_INVALID_OPT_VALUE',
- 'The value "0" is invalid for option "name"'
-);
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received 0"
+)
-expect(
- new errors.ERR_INVALID_OPT_VALUE('name', undefined),
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', undefined),
TypeError,
'TypeError',
- 'ERR_INVALID_OPT_VALUE',
- 'The value "undefined" is invalid for option "name"'
-);
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received undefined"
+)
-expect(
+checkError(
new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0),
TypeError,
'TypeError',
'ERR_INVALID_ARG_TYPE',
- 'The "chunk" argument must be one of type string, Buffer, or Uint8Array. Received type number'
-);
+ 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)'
+)
-expect(
+checkError(
new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'),
TypeError,
'TypeError',
'ERR_INVALID_ARG_TYPE',
- 'The first argument must not be of type string. Received type string'
-);
+ "The first argument must be not string. Received type string ('foo')"
+)
-expect(
+checkError(
new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined),
TypeError,
'TypeError',
'ERR_INVALID_ARG_TYPE',
- 'The "obj.prop" property must be of type string. Received type undefined'
-);
+ 'The "obj.prop" property must be of type string. Received undefined'
+)
-expect(
+checkError(
new errors.ERR_STREAM_PUSH_AFTER_EOF(),
Error,
'Error',
'ERR_STREAM_PUSH_AFTER_EOF',
'stream.push() after EOF'
-);
+)
-expect(
+checkError(
new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'),
Error,
'Error',
'ERR_METHOD_NOT_IMPLEMENTED',
'The _read() method is not implemented'
-);
+)
-expect(
+checkError(
new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'),
Error,
'Error',
'ERR_METHOD_NOT_IMPLEMENTED',
'The _write() method is not implemented'
-);
+)
-expect(
- new errors.ERR_STREAM_PREMATURE_CLOSE(),
- Error,
- 'Error',
- 'ERR_STREAM_PREMATURE_CLOSE',
- 'Premature close'
-);
+checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close')
-expect(
+checkError(
new errors.ERR_STREAM_DESTROYED('pipe'),
Error,
'Error',
'ERR_STREAM_DESTROYED',
'Cannot call pipe after a stream was destroyed'
-);
+)
-expect(
+checkError(
new errors.ERR_STREAM_DESTROYED('write'),
Error,
'Error',
'ERR_STREAM_DESTROYED',
'Cannot call write after a stream was destroyed'
-);
+)
-expect(
+checkError(
new errors.ERR_MULTIPLE_CALLBACK(),
Error,
'Error',
'ERR_MULTIPLE_CALLBACK',
'Callback called multiple times'
-);
+)
-expect(
- new errors.ERR_STREAM_CANNOT_PIPE(),
- Error,
- 'Error',
- 'ERR_STREAM_CANNOT_PIPE',
- 'Cannot pipe, not readable'
-);
+checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable')
-expect(
- new errors.ERR_STREAM_WRITE_AFTER_END(),
- Error,
- 'Error',
- 'ERR_STREAM_WRITE_AFTER_END',
- 'write after end'
-);
+checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end')
-expect(
+checkError(
new errors.ERR_STREAM_NULL_VALUES(),
TypeError,
'TypeError',
'ERR_STREAM_NULL_VALUES',
'May not write null values to stream'
-);
+)
-expect(
+checkError(
new errors.ERR_UNKNOWN_ENCODING('foo'),
TypeError,
'TypeError',
'ERR_UNKNOWN_ENCODING',
'Unknown encoding: foo'
-);
+)
-expect(
+checkError(
new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(),
Error,
'Error',
'ERR_STREAM_UNSHIFT_AFTER_END_EVENT',
'stream.unshift() after end event'
-);
-
-require('tap').pass('sync done');
+)
diff --git a/src/test/ours/test-fake-timers.js b/src/test/ours/test-fake-timers.js
new file mode 100644
index 0000000000..be95e071a7
--- /dev/null
+++ b/src/test/ours/test-fake-timers.js
@@ -0,0 +1,40 @@
+'use strict'
+
+require('../common')
+const t = require('tap')
+const util = require('util')
+const fakeTimers = require('@sinonjs/fake-timers')
+const Transform = require('../../lib/ours/index').Transform
+
+t.plan(1)
+
+function MyTransform() {
+ Transform.call(this)
+}
+
+util.inherits(MyTransform, Transform)
+
+const clock = fakeTimers.install({ toFake: ['setImmediate', 'nextTick'] })
+let stream2DataCalled = false
+
+const stream = new MyTransform()
+stream.on('data', function () {
+ stream.on('end', function () {
+ const stream2 = new MyTransform()
+ stream2.on('data', function () {
+ stream2.on('end', function () {
+ stream2DataCalled = true
+ })
+ setImmediate(function () {
+ stream2.end()
+ })
+ })
+ stream2.emit('data')
+ })
+ stream.end()
+})
+stream.emit('data')
+
+clock.runAll()
+clock.uninstall()
+t.ok(stream2DataCalled)
diff --git a/src/test/ours/test-stream-sync-write.js b/src/test/ours/test-stream-sync-write.js
new file mode 100644
index 0000000000..a12085a238
--- /dev/null
+++ b/src/test/ours/test-stream-sync-write.js
@@ -0,0 +1,44 @@
+'use strict'
+
+require('../common')
+const t = require('tap')
+const util = require('util')
+const stream = require('../../lib/ours/index')
+const WritableStream = stream.Writable
+
+t.plan(1)
+
+const InternalStream = function () {
+ WritableStream.call(this)
+}
+util.inherits(InternalStream, WritableStream)
+
+let invocations = 0
+InternalStream.prototype._write = function (chunk, encoding, callback) {
+ callback()
+}
+
+const internalStream = new InternalStream()
+
+const ExternalStream = function (writable) {
+ this._writable = writable
+ WritableStream.call(this)
+}
+util.inherits(ExternalStream, WritableStream)
+
+ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ this._writable.write(chunk, encoding, callback)
+}
+
+const externalStream = new ExternalStream(internalStream)
+
+for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString(), () => {
+ invocations++
+ })
+}
+
+externalStream.end()
+externalStream.on('finish', () => {
+ t.equal(invocations, 2000)
+})
diff --git a/src/util.js b/src/util.js
new file mode 100644
index 0000000000..5f788409a1
--- /dev/null
+++ b/src/util.js
@@ -0,0 +1,138 @@
+'use strict'
+
+const bufferModule = require('buffer')
+
+const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
+const Blob = globalThis.Blob || bufferModule.Blob
+/* eslint-disable indent */
+const isBlob =
+ typeof Blob !== 'undefined'
+ ? function isBlob(b) {
+ // eslint-disable-next-line indent
+ return b instanceof Blob
+ }
+ : function isBlob(b) {
+ return false
+ }
+/* eslint-enable indent */
+
+// This is a simplified version of AggregateError
+class AggregateError extends Error {
+ constructor(errors) {
+ if (!Array.isArray(errors)) {
+ throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
+ }
+
+ let message = ''
+ for (let i = 0; i < errors.length; i++) {
+ message += ` ${errors[i].stack}\n`
+ }
+
+ super(message)
+ this.name = 'AggregateError'
+ this.errors = errors
+ }
+}
+
+module.exports = {
+ AggregateError,
+ once(callback) {
+ let called = false
+ return function (...args) {
+ if (called) {
+ return
+ }
+
+ called = true
+ callback.apply(this, args)
+ }
+ },
+ createDeferredPromise: function () {
+ let resolve
+ let reject
+
+ // eslint-disable-next-line promise/param-names
+ const promise = new Promise((res, rej) => {
+ resolve = res
+ reject = rej
+ })
+ return {
+ promise,
+ resolve,
+ reject
+ }
+ },
+ promisify(fn) {
+ return new Promise((resolve, reject) => {
+ fn((err, ...args) => {
+ if (err) {
+ return reject(err)
+ }
+
+ return resolve(...args)
+ })
+ })
+ },
+ debuglog() {
+ return function () {}
+ },
+ format(format, ...args) {
+ // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
+ return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
+ const replacement = args.shift()
+
+ if (type === 'f') {
+ return replacement.toFixed(6)
+ } else if (type === 'j') {
+ return JSON.stringify(replacement)
+ } else if (type === 's' && typeof replacement === 'object') {
+ const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
+ return `${ctor} {}`.trim()
+ } else {
+ return replacement.toString()
+ }
+ })
+ },
+ inspect(value) {
+ // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
+ switch (typeof value) {
+ case 'string':
+ if (value.includes("'")) {
+ if (!value.includes('"')) {
+ return `"${value}"`
+ } else if (!value.includes('`') && !value.includes('${')) {
+ return `\`${value}\``
+ }
+ }
+
+ return `'${value}'`
+ case 'number':
+ if (isNaN(value)) {
+ return 'NaN'
+ } else if (Object.is(value, -0)) {
+ return String(value)
+ }
+
+ return value
+ case 'bigint':
+ return `${String(value)}n`
+ case 'boolean':
+ case 'undefined':
+ return String(value)
+ case 'object':
+ return '{}'
+ }
+ },
+ types: {
+ isAsyncFunction(fn) {
+ return fn instanceof AsyncFunction
+ },
+
+ isArrayBufferView(arr) {
+ return ArrayBuffer.isView(arr)
+ }
+ },
+ isBlob
+}
+
+module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
diff --git a/tap.yml b/tap.yml
new file mode 100644
index 0000000000..8de8ebe6d5
--- /dev/null
+++ b/tap.yml
@@ -0,0 +1,6 @@
+---
+bail: false
+coverage: false
+node-arg:
+ - --expose-internals
+ - --no-warnings
diff --git a/test/browser.js b/test/browser.js
deleted file mode 100644
index d903d1f11b..0000000000
--- a/test/browser.js
+++ /dev/null
@@ -1,81 +0,0 @@
-if (!global.console) {
- global.console = {};
-}
-if (!global.console.log) {
- global.console.log = function () {};
-}
-if (!global.console.error) {
- global.console.error = global.console.log;
-}
-if (!global.console.info) {
- global.console.info = global.console.log;
-}
-var test = require('tape');
-var util = require('util');
-
-// TODO: add replacements instead
-global.process = {
- env: {},
- on: function () {},
- cwd: function () {
- return '/';
- },
- binding: function () {
- return {
- hasTracing: false
- };
- }
-};
-
-test('streams', function (t) {
- require('./browser/test-stream-big-packet')(t);
- require('./browser/test-stream-big-push')(t);
- require('./browser/test-stream-duplex')(t);
- require('./browser/test-stream-end-paused')(t);
- require('./browser/test-stream-ispaused')(t);
- require('./browser/test-stream-finished')(t);
- require('./browser/test-stream-pipeline')(t);
- require('./browser/test-stream-pipe-after-end')(t);
- require('./browser/test-stream-pipe-cleanup')(t);
- require('./browser/test-stream-pipe-cleanup-pause')(t);
- require('./browser/test-stream-pipe-error-handling')(t);
- require('./browser/test-stream-pipe-event')(t);
- require('./browser/test-stream-push-order')(t);
- require('./browser/test-stream-push-strings')(t);
- require('./browser/test-stream-readable-constructor-set-methods')(t);
- require('./browser/test-stream-readable-event')(t);
- require('./browser/test-stream-transform-constructor-set-methods')(t);
- require('./browser/test-stream-transform-objectmode-falsey-value')(t);
- require('./browser/test-stream-transform-split-objectmode')(t);
- require('./browser/test-stream-unshift-empty-chunk')(t);
- require('./browser/test-stream-unshift-read-race')(t);
- require('./browser/test-stream-writable-change-default-encoding')(t);
- require('./browser/test-stream-writable-constructor-set-methods')(t);
- require('./browser/test-stream-writable-decoded-encoding')(t);
- require('./browser/test-stream-writev')(t);
- require('./browser/test-stream-sync-write')(t);
- require('./browser/test-stream-pipe-without-listenerCount');
-});
-
-test('streams 2', function (t) {
- require('./browser/test-stream2-base64-single-char-read-end')(t);
- require('./browser/test-stream2-compatibility')(t);
- require('./browser/test-stream2-large-read-stall')(t);
- require('./browser/test-stream2-objects')(t);
- require('./browser/test-stream2-pipe-error-handling')(t);
- require('./browser/test-stream2-pipe-error-once-listener')(t);
- require('./browser/test-stream2-push')(t);
- require('./browser/test-stream2-readable-empty-buffer-no-eof')(t);
- // require('./browser/test-stream2-readable-from-list')(t);
- // require('./browser/test-stream2-transform')(t);
- require('./browser/test-stream2-set-encoding')(t);
- require('./browser/test-stream2-readable-legacy-drain')(t);
- require('./browser/test-stream2-readable-wrap-empty')(t);
- require('./browser/test-stream2-readable-non-empty-end')(t);
- require('./browser/test-stream2-readable-wrap')(t);
- require('./browser/test-stream2-unpipe-drain')(t);
- require('./browser/test-stream2-writable')(t);
-});
-test('streams 3', function (t) {
- require('./browser/test-stream3-pause-then-read')(t);
-});
diff --git a/test/browser/fixtures/esbuild-browsers-shims.mjs b/test/browser/fixtures/esbuild-browsers-shims.mjs
new file mode 100644
index 0000000000..9186f40744
--- /dev/null
+++ b/test/browser/fixtures/esbuild-browsers-shims.mjs
@@ -0,0 +1,9 @@
+import * as bufferModule from 'buffer-es6'
+import * as processModule from 'process-es6'
+
+export const process = processModule
+export const Buffer = bufferModule.Buffer
+
+export function setImmediate(fn, ...args) {
+ setTimeout(() => fn(...args), 1)
+}
diff --git a/test/browser/fixtures/esbuild.browser.config.mjs b/test/browser/fixtures/esbuild.browser.config.mjs
new file mode 100644
index 0000000000..6dd371dd9a
--- /dev/null
+++ b/test/browser/fixtures/esbuild.browser.config.mjs
@@ -0,0 +1,23 @@
+import { build } from 'esbuild'
+import alias from 'esbuild-plugin-alias'
+import { createRequire } from 'module'
+
+const require = createRequire(import.meta.url)
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.browser.js',
+ bundle: true,
+ platform: 'browser',
+ plugins: [
+ alias({
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ })
+ ],
+ define: {
+ global: 'globalThis'
+ },
+ inject: ['test/browser/fixtures/esbuild-browsers-shims.mjs']
+}).catch(() => process.exit(1))
diff --git a/test/browser/fixtures/esbuild.node.config.mjs b/test/browser/fixtures/esbuild.node.config.mjs
new file mode 100644
index 0000000000..21f70ad284
--- /dev/null
+++ b/test/browser/fixtures/esbuild.node.config.mjs
@@ -0,0 +1,8 @@
+import { build } from 'esbuild'
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.node.js',
+ bundle: true,
+ platform: 'node'
+}).catch(() => process.exit(1))
diff --git a/test/browser/fixtures/index.html b/test/browser/fixtures/index.html
new file mode 100644
index 0000000000..16b329e8e6
--- /dev/null
+++ b/test/browser/fixtures/index.html
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/browser/fixtures/prepare.sh b/test/browser/fixtures/prepare.sh
new file mode 100644
index 0000000000..56380d61f4
--- /dev/null
+++ b/test/browser/fixtures/prepare.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -x -e
+
+[ "$BUNDLER" == "" ] && BUNDLER=$1
+
+if [ "$BUNDLER" != "" ]; then
+ rm -rf tmp/$BUNDLER
+ mkdir -p tmp/$BUNDLER
+ cp test/browser/fixtures/index.html tmp/$BUNDLER
+fi
+
+case $BUNDLER in
+ browserify)
+ browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js
+ browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js
+ ;;
+ esbuild)
+ node test/browser/fixtures/esbuild.browser.config.mjs
+ node test/browser/fixtures/esbuild.node.config.mjs
+ ;;
+ rollup)
+ rollup -c test/browser/fixtures/rollup.browser.config.mjs
+ rollup -c test/browser/fixtures/rollup.node.config.mjs
+ ;;
+ webpack)
+ webpack -c test/browser/fixtures/webpack.browser.config.mjs
+ webpack -c test/browser/fixtures/webpack.node.config.mjs
+ ;;
+ *)
+ echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup or webpack."
+ exit 1
+ ;;
+esac
\ No newline at end of file
diff --git a/test/browser/fixtures/rollup.browser.config.mjs b/test/browser/fixtures/rollup.browser.config.mjs
new file mode 100644
index 0000000000..43d0e9f4ab
--- /dev/null
+++ b/test/browser/fixtures/rollup.browser.config.mjs
@@ -0,0 +1,27 @@
+import commonjs from '@rollup/plugin-commonjs'
+import inject from '@rollup/plugin-inject'
+import nodeResolve from '@rollup/plugin-node-resolve'
+import { resolve } from 'path'
+import nodePolyfill from 'rollup-plugin-polyfill-node'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ file: 'tmp/rollup/suite.browser.js',
+ format: 'iife',
+ name: 'readableStreamTestSuite'
+ },
+ plugins: [
+ commonjs(),
+ nodePolyfill(),
+ inject({
+ process: resolve('node_modules/process-es6/browser.js'),
+ Buffer: [resolve('node_modules/buffer-es6/index.js'), 'Buffer']
+ }),
+ nodeResolve({
+ browser: true,
+ preferBuiltins: false
+ })
+ ]
+}
diff --git a/test/browser/fixtures/rollup.node.config.mjs b/test/browser/fixtures/rollup.node.config.mjs
new file mode 100644
index 0000000000..7eac856bce
--- /dev/null
+++ b/test/browser/fixtures/rollup.node.config.mjs
@@ -0,0 +1,19 @@
+import commonjs from '@rollup/plugin-commonjs'
+import nodeResolve from '@rollup/plugin-node-resolve'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ file: 'tmp/rollup/suite.node.js',
+ format: 'cjs',
+ name: 'readableStreamTestSuite',
+ exports: 'auto'
+ },
+ plugins: [
+ commonjs(),
+ nodeResolve({
+ browser: false,
+ preferBuiltins: true
+ })
+ ]
+}
diff --git a/test/browser/fixtures/webpack.browser.config.mjs b/test/browser/fixtures/webpack.browser.config.mjs
new file mode 100644
index 0000000000..cd40faa72c
--- /dev/null
+++ b/test/browser/fixtures/webpack.browser.config.mjs
@@ -0,0 +1,36 @@
+import { createRequire } from 'module'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import webpack from 'webpack'
+
+const require = createRequire(import.meta.url)
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.browser.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'web',
+ performance: false,
+ plugins: [
+ new webpack.BannerPlugin({
+ banner: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ raw: true
+ }),
+ new webpack.ProvidePlugin({
+ process: require.resolve('process-es6'),
+ Buffer: [require.resolve('buffer-es6'), 'Buffer']
+ })
+ ],
+ resolve: {
+ aliasFields: ['browser'],
+ fallback: {
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ }
+ }
+}
diff --git a/test/browser/fixtures/webpack.node.config.mjs b/test/browser/fixtures/webpack.node.config.mjs
new file mode 100644
index 0000000000..3b20bdef47
--- /dev/null
+++ b/test/browser/fixtures/webpack.node.config.mjs
@@ -0,0 +1,15 @@
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.node.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'node',
+ performance: false
+}
diff --git a/test/browser/runner-browser.mjs b/test/browser/runner-browser.mjs
new file mode 100644
index 0000000000..e8bb84482c
--- /dev/null
+++ b/test/browser/runner-browser.mjs
@@ -0,0 +1,109 @@
+import { resolve } from 'node:path'
+import { Readable } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import { chromium, firefox, webkit } from 'playwright'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBrowsers = ['chrome', 'firefox', 'safari', 'edge']
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const headless = process.env.HEADLESS !== 'false'
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+
+ let [browser, bundler] = process.argv.slice(2, 4)
+
+ if (!browser) {
+ browser = process.env.BROWSER
+ }
+
+ if (!bundler) {
+ bundler = process.env.BUNDLER
+ }
+
+ if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`)
+ console.error('You can also use the BROWSER and BUNDLER environment variables.')
+ process.exit(1)
+ }
+
+ return { browser, bundler, headless, reporter }
+}
+
+function createBrowser({ browser: id, headless }) {
+ switch (id) {
+ case 'firefox':
+ return firefox.launch({ headless })
+ case 'safari':
+ return webkit.launch({ headless })
+ case 'edge':
+ return chromium.launch({ headless, channel: 'msedge' })
+ default:
+ return chromium.launch({ headless })
+ }
+}
+
+function setupTape(page, configuration) {
+ const output = new Readable({ read() {} })
+ const parser = new Parser({ strict: true })
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ parser.on('line', (line) => {
+ if (line !== '# readable-stream-finished\n') {
+ if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+
+ return
+ }
+
+ output.push(null)
+
+ if (configuration.headless) {
+ browser.close()
+ }
+ })
+
+ page.on('console', (msg) => {
+ if (msg.type() === 'error') {
+ console.error(`\x1b[31m\x1b[1mconsole.error:\x1b[0m ${msg.text()}\n`)
+ return
+ }
+
+ output.push(msg.text() + '\n')
+ })
+
+  // Firefox in headless mode reports an error even when onerror has caught it, so skip the handler in that case
+ if (!configuration.headless || configuration.browser !== 'firefox') {
+ page.on('pageerror', (err) => {
+ console.log('\x1b[31m\x1b[1m--- The browser thrown an uncaught error ---\x1b[0m')
+ console.log(err.stack)
+
+ if (configuration.headless) {
+ console.log('\x1b[31m\x1b[1m--- Exiting with exit code 1 ---\x1b[0m')
+ process.exit(1)
+ } else {
+ process.exitCode = 1
+ }
+ })
+ }
+}
+
+const configuration = parseEnviroment()
+const browser = await createBrowser(configuration)
+const page = await browser.newPage()
+setupTape(page, configuration)
+
+// Execute the test suite
+const __dirname = fileURLToPath(new URL('.', import.meta.url))
+await page.goto(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/index.html`)}`)
diff --git a/test/browser/runner-node.mjs b/test/browser/runner-node.mjs
new file mode 100644
index 0000000000..840d19e2dc
--- /dev/null
+++ b/test/browser/runner-node.mjs
@@ -0,0 +1,77 @@
+import { resolve } from 'node:path'
+import { Duplex } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`)
+ console.error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ return { bundler, reporter }
+}
+
+function setupTape(configuration) {
+ const output = new Duplex({ read() {}, write() {} })
+ const parser = new Parser({ strict: true })
+
+ globalThis.logger = function (message, ...args) {
+ if (typeof message !== 'string') {
+ console.log(message, ...args)
+ return
+ }
+
+ output.push(message + '\n')
+ }
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ process.on('uncaughtException', (err) => {
+ if (global.onerror) {
+ global.onerror(err)
+ } else {
+ process.removeAllListeners('uncaughtException')
+ throw err
+ }
+ })
+
+ parser.on('line', (line) => {
+ if (line === '# readable-stream-finished\n') {
+ output.push(null)
+ output.end()
+ return
+ } else if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+ })
+}
+
+async function main() {
+ const configuration = parseEnviroment()
+ setupTape(configuration)
+
+ // Execute the test suite
+ const __dirname = fileURLToPath(new URL('.', import.meta.url))
+ await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`)
+}
+
+main().catch((e) => {
+ console.error(e)
+ process.exit(1)
+})
diff --git a/test/browser/runner-prepare.mjs b/test/browser/runner-prepare.mjs
new file mode 100644
index 0000000000..76e38f8504
--- /dev/null
+++ b/test/browser/runner-prepare.mjs
@@ -0,0 +1,107 @@
+import { exec } from 'child_process'
+import { promises } from 'fs'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import util from '../../lib/ours/util.js'
+const { copyFile, mkdir, rmdir } = promises
+
+function highlightFile(file) {
+ return `\x1b[33m${file.replace(process.cwd() + '/', '')}\x1b[0m`
+}
+
+function info(message) {
+ console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+function error(message) {
+ console.log(`\x1b[31m[INFO]\x1b[0m ${message}`)
+}
+
+async function run(command) {
+ info(`Executing \x1b[33m${command}\x1b[0m ...`)
+ const { promise, reject, resolve } = util.createDeferredPromise()
+
+ let hasOutput = false
+ function logOutput(chunk) {
+ if (!hasOutput) {
+ hasOutput = true
+ console.log('')
+ }
+
+ console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
+ }
+
+ try {
+ const process = exec(command, { stdio: 'pipe' }, (error) => {
+ if (error) {
+ return reject(error)
+ }
+
+ resolve(error)
+ })
+
+ process.stdout.on('data', logOutput)
+ process.stderr.on('data', logOutput)
+ await promise
+
+ if (hasOutput) {
+ console.log('')
+ }
+ } catch (e) {
+ if (hasOutput) {
+ console.log('')
+ }
+
+ error(`Command failed with status code ${e.code}.`)
+ process.exit(1)
+ }
+}
+
+async function main() {
+ const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ error(`Usage: node await runner-prepare.mjs [${validBundlers.join('|')}]`)
+ error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), `../../tmp/${bundler}`)
+ const sourceIndex = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../test/browser/fixtures/index.html')
+ const targetIndex = resolve(rootDir, 'index.html')
+
+ info(`Emptying directory ${highlightFile(rootDir)} ...`)
+ try {
+ await rmdir(rootDir, { recursive: true })
+ } catch (e) {
+ // No-op
+ }
+ await mkdir(rootDir, { recursive: true })
+
+ info(`Copying file ${highlightFile(sourceIndex)} to ${highlightFile(targetIndex)} ...`)
+ await copyFile(sourceIndex, targetIndex)
+
+ switch (bundler) {
+ case 'browserify':
+ await run('browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js')
+ await run('browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js')
+ break
+ case 'esbuild':
+ await run('node src/test/browser/fixtures/esbuild.browser.config.mjs')
+ await run('node src/test/browser/fixtures/esbuild.node.config.mjs')
+ break
+ case 'rollup':
+ await run('rollup -c test/browser/fixtures/rollup.browser.config.mjs')
+ await run('rollup -c test/browser/fixtures/rollup.node.config.mjs')
+ break
+ case 'webpack':
+ await run('webpack -c test/browser/fixtures/webpack.browser.config.mjs')
+ await run('webpack -c test/browser/fixtures/webpack.node.config.mjs')
+ }
+}
+
+main().catch((e) => {
+ error(e)
+ process.exit(1)
+})
diff --git a/test/browser/symbols.js b/test/browser/symbols.js
new file mode 100644
index 0000000000..8450b8f64c
--- /dev/null
+++ b/test/browser/symbols.js
@@ -0,0 +1,6 @@
+'use strict'
+
+module.exports = {
+ kReadableStreamSuiteName: Symbol('readable-stream.suiteName'),
+ kReadableStreamSuiteHasMultipleTests: Symbol('readable-stream.suiteHasMultipleTests')
+}
diff --git a/test/browser/test-browser.js b/test/browser/test-browser.js
new file mode 100644
index 0000000000..b4a22f9c08
--- /dev/null
+++ b/test/browser/test-browser.js
@@ -0,0 +1,128 @@
+'use strict'
+
+const logger = globalThis.logger || console.log
+
+const tape = require('tape')
+
+const { createDeferredPromise } = require('../../lib/ours/util')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+let totalTests = 0
+let completed = 0
+let failed = 0
+
+async function test(rootName, fn) {
+ // Gather all tests in the file
+ const tests = {}
+
+ function addTests(name, fn) {
+ tests[`${rootName} - ${name}`] = fn
+ }
+
+ if (fn[kReadableStreamSuiteHasMultipleTests]) {
+ fn(addTests)
+ } else {
+ tests[rootName] = fn
+ } // Execute each test in a separate harness and then output overall results
+
+ for (const [name, subtest] of Object.entries(tests)) {
+ const currentIndex = ++totalTests
+ const harness = tape.createHarness()
+ const { promise, resolve } = createDeferredPromise()
+ const messages = [`# Subtest: ${name}`]
+ harness.createStream().on('data', function (row) {
+ if (row.startsWith('TAP version') || row.match(new RegExp(`^# (?:${name})`))) {
+ return
+ }
+
+ messages.push(row.trim().replace(/^/gm, ' '))
+ })
+ harness.onFinish(() => {
+ const success = harness._exitCode === 0
+ messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`)
+ logger(messages.join('\n'))
+ completed++
+
+ if (!success) {
+ failed++
+ }
+
+ resolve()
+ })
+ harness(name, subtest)
+ await promise
+ }
+}
+
+async function runTests(suites) {
+ // Setup an interval
+ const interval = setInterval(() => {
+ if (completed < totalTests) {
+ return
+ }
+
+ clearInterval(interval)
+ logger(`1..${totalTests}`)
+ logger(`# tests ${totalTests}`)
+ logger(`# pass ${completed - failed}`)
+ logger(`# fail ${failed}`)
+    logger(`# ${failed === 0 ? 'ok' : 'not ok'}`) // This line is used by the playwright script to detect when we're done
+
+ logger('# readable-stream-finished')
+  }, 100) // Execute each test serially, to avoid side-effect errors when dealing with global error handling
+
+ for (const suite of suites) {
+ await test(suite[kReadableStreamSuiteName], suite)
+ }
+} // Important: Do not try to make the require dynamic because bundlers will not like it
+
+runTests([
+ require('./test-stream-big-packet'),
+ require('./test-stream-big-push'),
+ require('./test-stream-duplex'),
+ require('./test-stream-end-paused'),
+ require('./test-stream-finished'),
+ require('./test-stream-ispaused'),
+ require('./test-stream-pipe-after-end'),
+ require('./test-stream-pipe-cleanup-pause'),
+ require('./test-stream-pipe-cleanup'),
+ require('./test-stream-pipe-error-handling'),
+ require('./test-stream-pipe-event'),
+ require('./test-stream-pipe-without-listenerCount'),
+ require('./test-stream-pipeline'),
+ require('./test-stream-push-order'),
+ require('./test-stream-push-strings'),
+ require('./test-stream-readable-constructor-set-methods'),
+ require('./test-stream-readable-event'),
+ require('./test-stream-sync-write'),
+ require('./test-stream-transform-constructor-set-methods'),
+ require('./test-stream-transform-objectmode-falsey-value'),
+ require('./test-stream-transform-split-objectmode'),
+ require('./test-stream-unshift-empty-chunk'),
+ require('./test-stream-unshift-read-race'),
+ require('./test-stream-writable-change-default-encoding'),
+ require('./test-stream-writable-constructor-set-methods'),
+ require('./test-stream-writable-decoded-encoding'),
+ require('./test-stream-writev'),
+ require('./test-stream2-base64-single-char-read-end'),
+ require('./test-stream2-compatibility'),
+ require('./test-stream2-large-read-stall'),
+ require('./test-stream2-objects'),
+ require('./test-stream2-pipe-error-handling'),
+ require('./test-stream2-pipe-error-once-listener'),
+ require('./test-stream2-push'),
+ require('./test-stream2-readable-empty-buffer-no-eof'),
+ require('./test-stream2-readable-from-list'),
+ require('./test-stream2-readable-legacy-drain'),
+ require('./test-stream2-readable-non-empty-end'),
+ require('./test-stream2-readable-wrap-empty'),
+ require('./test-stream2-readable-wrap'),
+ require('./test-stream2-set-encoding'),
+ require('./test-stream2-transform'),
+ require('./test-stream2-unpipe-drain'),
+ require('./test-stream2-writable'),
+ require('./test-stream3-pause-then-read')
+]).catch((e) => {
+ console.error(e)
+})
diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js
index 2c64ce8e59..f01f59f689 100644
--- a/test/browser/test-stream-big-packet.js
+++ b/test/browser/test-stream-big-packet.js
@@ -1,62 +1,73 @@
-'use strict';
-var common = require('../common');
-var inherits = require('inherits');
-var stream = require('../../');
+'use strict'
+
+const inherits = require('inherits')
+
+const { Transform } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
- t.test('big packet', function (t) {
- t.plan(3);
- var passed = false;
-
- function PassThrough() {
- stream.Transform.call(this);
- };
- inherits(PassThrough, stream.Transform);
- PassThrough.prototype._transform = function(chunk, encoding, done) {
- this.push(chunk);
- done();
- };
-
- function TestStream() {
- stream.Transform.call(this);
- };
- inherits(TestStream, stream.Transform);
- TestStream.prototype._transform = function(chunk, encoding, done) {
- if (!passed) {
- // Char 'a' only exists in the last write
- passed = indexOf(chunk.toString(), 'a') >= 0;
- }
- if (passed) {
- t.ok(passed);
- }
- done();
- };
-
- var s1 = new PassThrough();
- var s2 = new PassThrough();
- var s3 = new TestStream();
- s1.pipe(s3);
- // Don't let s2 auto close which may close s3
- s2.pipe(s3, {end: false});
-
- // We must write a buffer larger than highWaterMark
- var big = Buffer.alloc(s1._writableState.highWaterMark + 1);
- big.fill('x');
-
- // Since big is larger than highWaterMark, it will be buffered internally.
- t.ok(!s1.write(big));
- // 'tiny' is small enough to pass through internal buffer.
- t.ok(s2.write('tiny'));
-
- // Write some small data in next IO loop, which will never be written to s3
- // Because 'drain' event is not emitted from s1 and s1 is still paused
- setImmediate(s1.write.bind(s1), 'later');
-
- function indexOf (xs, x) {
- for (var i = 0, l = xs.length; i < l; i++) {
- if (xs[i] === x) return i;
+ t.plan(3)
+ let passed = false
+
+ function PassThrough() {
+ Transform.call(this)
+ }
+
+ inherits(PassThrough, Transform)
+
+ PassThrough.prototype._transform = function (chunk, encoding, done) {
+ this.push(chunk)
+ done()
+ }
+
+ function TestStream() {
+ Transform.call(this)
+ }
+
+ inherits(TestStream, Transform)
+
+ TestStream.prototype._transform = function (chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = indexOf(chunk.toString(), 'a') >= 0
+ }
+
+ if (passed) {
+ t.ok(passed)
+ }
+
+ done()
+ }
+
+ const s1 = new PassThrough()
+ const s2 = new PassThrough()
+ const s3 = new TestStream()
+ s1.pipe(s3) // Don't let s2 auto close which may close s3
+
+ s2.pipe(s3, {
+ end: false
+ }) // We must write a buffer larger than highWaterMark
+
+ const big = Buffer.alloc(s1._writableState.highWaterMark + 1)
+ big.fill('x') // Since big is larger than highWaterMark, it will be buffered internally.
+
+ t.notOk(s1.write(big)) // 'tiny' is small enough to pass through internal buffer.
+
+ t.ok(s2.write('tiny')) // Write some small data in next IO loop, which will never be written to s3
+ // Because 'drain' event is not emitted from s1 and s1 is still paused
+
+ setImmediate(s1.write.bind(s1), 'later')
+
+ function indexOf(xs, x) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) {
+ return i
}
- return -1;
}
- });
+
+ return -1
+ }
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-big-packet'
diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js
index 7403e16937..01398e63e9 100644
--- a/test/browser/test-stream-big-push.js
+++ b/test/browser/test-stream-big-push.js
@@ -1,68 +1,64 @@
-'use strict';
-var common = require('../common');
-var stream = require('../../');
-module.exports = function (t) {
- t.test('big push', function (t) {
-
- var str = 'asdfasdfasdfasdfasdf';
-
- var r = new stream.Readable({
- highWaterMark: 5,
- encoding: 'utf8'
- });
+'use strict'
- var reads = 0;
- var eofed = false;
- var ended = false;
+const { Readable } = require('../../lib/ours/index')
- r._read = function(n) {
- if (reads === 0) {
- setTimeout(function() {
- r.push(str);
- });
- reads++;
- } else if (reads === 1) {
- var ret = r.push(str);
- t.equal(ret, false);
- reads++;
- } else {
- t.notOk(eofed);
- eofed = true;
- r.push(null);
- }
- };
+const { kReadableStreamSuiteName } = require('./symbols')
- r.on('end', function() {
- ended = true;
- });
+module.exports = function (t) {
+ t.plan(10)
+ const str = 'asdfasdfasdfasdfasdf'
+ const r = new Readable({
+ highWaterMark: 5,
+ encoding: 'utf8'
+ })
+ let reads = 0
+ let eofed = false
+ let ended = false
- // push some data in to start.
- // we've never gotten any read event at this point.
- var ret = r.push(str);
- // should be false. > hwm
- t.notOk(ret);
- var chunk = r.read();
- t.equal(chunk, str);
- chunk = r.read();
- t.equal(chunk, null);
+ r._read = function (n) {
+ if (reads === 0) {
+ setTimeout(function () {
+ r.push(str)
+ })
+ reads++
+ } else if (reads === 1) {
+ const ret = r.push(str)
+ t.equal(ret, false)
+ reads++
+ } else {
+ t.notOk(eofed)
+ eofed = true
+ r.push(null)
+ }
+ }
- r.once('readable', function() {
- // this time, we'll get *all* the remaining data, because
- // it's been added synchronously, as the read WOULD take
- // us below the hwm, and so it triggered a _read() again,
- // which synchronously added more, which we then return.
- chunk = r.read();
- t.equal(chunk, str + str);
+ r.on('end', function () {
+ ended = true
+ }) // push some data in to start.
+ // we've never gotten any read event at this point.
- chunk = r.read();
- t.equal(chunk, null);
- });
+ const ret = r.push(str) // should be false. > hwm
- r.on('end', function() {
- t.ok(eofed);
- t.ok(ended);
- t.equal(reads, 2);
- t.end();
- });
- });
+ t.notOk(ret)
+ let chunk = r.read()
+ t.equal(chunk, str)
+ chunk = r.read()
+ t.equal(chunk, null)
+ r.once('readable', function () {
+ // this time, we'll get *all* the remaining data, because
+ // it's been added synchronously, as the read WOULD take
+ // us below the hwm, and so it triggered a _read() again,
+ // which synchronously added more, which we then return.
+ chunk = r.read()
+ t.equal(chunk, str + str)
+ chunk = r.read()
+ t.equal(chunk, null)
+ })
+ r.on('end', function () {
+ t.ok(eofed)
+ t.ok(ended)
+ t.equal(reads, 2)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-big-push'
diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js
index 9bfd6af145..1d768bb393 100644
--- a/test/browser/test-stream-duplex.js
+++ b/test/browser/test-stream-duplex.js
@@ -1,35 +1,40 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var Duplex = require('../../').Transform;
+const { Duplex } = require('../../lib/ours/index')
-var stream = new Duplex({ objectMode: true });
-module.exports = function (t) {
- t.test('duplex', function (t) {
- t.plan(4);
- t.ok(stream._readableState.objectMode);
- t.ok(stream._writableState.objectMode);
-
- var written;
- var read;
-
- stream._write = function(obj, _, cb) {
- written = obj;
- cb();
- };
-
- stream._read = function() {};
+const { kReadableStreamSuiteName } = require('./symbols')
- stream.on('data', function(obj) {
- read = obj;
- });
-
- stream.push({ val: 1 });
- stream.end({ val: 2 });
-
- stream.on('end', function() {
- t.equal(read.val, 1);
- t.equal(written.val, 2);
- });
- });
+module.exports = function (t) {
+ t.plan(4)
+ const stream = new Duplex({
+ objectMode: true
+ })
+ t.ok(stream._readableState.objectMode)
+ t.ok(stream._writableState.objectMode)
+ let written
+ let read
+
+ stream._write = function (obj, _, cb) {
+ written = obj
+ cb()
+ }
+
+ stream._read = function () {}
+
+ stream.on('data', function (obj) {
+ read = obj
+ })
+ stream.on('end', function () {
+ t.equal(read.val, 1)
+ t.equal(written.val, 2)
+ })
+ stream.push({
+ val: 1
+ })
+ stream.end({
+ val: 2
+ })
+ stream.push(null)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-duplex'
diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js
index ff56dd8127..722db644b2 100644
--- a/test/browser/test-stream-end-paused.js
+++ b/test/browser/test-stream-end-paused.js
@@ -1,32 +1,30 @@
-'use strict';
-var common = require('../common');
+'use strict'
+const { Readable } = require('../../lib/ours/index')
-// Make sure we don't miss the end event for paused 0-length streams
+const { kReadableStreamSuiteName } = require('./symbols')
-var Readable = require('../../').Readable;
-var stream = new Readable();
module.exports = function (t) {
- t.test('end pause', function (t) {
- t.plan(2);
- var calledRead = false;
- stream._read = function() {
- t.notOk(calledRead);
- calledRead = true;
- this.push(null);
- };
+ t.plan(2)
+ const stream = new Readable()
+ let calledRead = false
- stream.on('data', function() {
- throw new Error('should not ever get data');
- });
- stream.pause();
+ stream._read = function () {
+ t.notOk(calledRead)
+ calledRead = true
+ this.push(null)
+ }
- setTimeout(function() {
- stream.on('end', function() {
- t.ok(calledRead);
- });
- stream.resume();
- });
-
- });
+ stream.on('data', function () {
+ throw new Error('should not ever get data')
+ })
+ stream.pause()
+ setTimeout(function () {
+ stream.on('end', function () {
+ t.ok(calledRead)
+ })
+ stream.resume()
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-end-paused'
diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js
index bd0de3e858..cec12616e2 100644
--- a/test/browser/test-stream-finished.js
+++ b/test/browser/test-stream-finished.js
@@ -1,60 +1,57 @@
-"use strict";
+'use strict'
+const { Writable, Readable, Transform, finished } = require('../../lib/ours/index')
-var common = require('../common');
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
-var _require = require('../../'),
- Writable = _require.Writable,
- Readable = _require.Readable,
- Transform = _require.Transform,
- finished = _require.finished;
-
-module.exports = function (t) {
- t.test('readable finished', function (t) {
-
- var rs = new Readable({
+module.exports = function (test) {
+ test('readable finished', function (t) {
+ t.plan(1)
+ const rs = new Readable({
read: function read() {}
- });
- finished(rs, common.mustCall(function (err) {
- t.ok(!err, 'no error');
- t.end();
- }));
- rs.push(null);
- rs.resume();
- });
- t.test('writable finished', function (t) {
- var ws = new Writable({
+ })
+ finished(rs, (err) => {
+ t.ifErr(err)
+ })
+ rs.push(null)
+ rs.resume()
+ })
+ test('writable finished', function (t) {
+ t.plan(1)
+ const ws = new Writable({
write: function write(data, enc, cb) {
- cb();
+ cb()
}
- });
- finished(ws, common.mustCall(function (err) {
- t.ok(!err, 'no error');
- t.end();
- }));
- ws.end();
- });
- t.test('transform finished', function (t) {
- var tr = new Transform({
+ })
+ finished(ws, (err) => {
+ t.ifErr(err)
+ })
+ ws.end()
+ })
+ test('transform finished', function (t) {
+ t.plan(3)
+ const tr = new Transform({
transform: function transform(data, enc, cb) {
- cb();
+ cb()
}
- });
- var finish = false;
- var ended = false;
+ })
+ let finish = false
+ let ended = false
tr.on('end', function () {
- ended = true;
- });
+ ended = true
+ })
tr.on('finish', function () {
- finish = true;
- });
- finished(tr, common.mustCall(function (err) {
- t.ok(!err, 'no error');
- t.ok(finish);
- t.ok(ended);
- t.end();
- }));
- tr.end();
- tr.resume();
- });
-};
+ finish = true
+ })
+ finished(tr, (err) => {
+ t.ifErr(err)
+ t.ok(finish)
+ t.ok(ended)
+ })
+ tr.end()
+ tr.resume()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-finished'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js
index d080f41ba4..3cc378e909 100644
--- a/test/browser/test-stream-ispaused.js
+++ b/test/browser/test-stream-ispaused.js
@@ -1,27 +1,24 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var stream = require('../../');
-module.exports = function (t) {
- t.test('is paused', function (t) {
- var readable = new stream.Readable();
+const stream = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
- // _read is a noop, here.
- readable._read = Function();
+module.exports = function (t) {
+ t.plan(4)
+ const readable = new stream.Readable() // _read is a noop, here.
- // default state of a stream is not "paused"
- t.notOk(readable.isPaused());
+ readable._read = () => {} // default state of a stream is not "paused"
- // make the stream start flowing...
- readable.on('data', Function());
+ t.notOk(readable.isPaused()) // make the stream start flowing...
- // still not paused.
- t.notOk(readable.isPaused());
+ readable.on('data', () => {}) // still not paused.
- readable.pause();
- t.ok(readable.isPaused());
- readable.resume();
- t.notOk(readable.isPaused());
- t.end();
- });
+ t.notOk(readable.isPaused())
+ readable.pause()
+ t.ok(readable.isPaused())
+ readable.resume()
+ t.notOk(readable.isPaused())
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-ispaused'
diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js
index 0ca97b3d70..ba65ee54ab 100644
--- a/test/browser/test-stream-pipe-after-end.js
+++ b/test/browser/test-stream-pipe-after-end.js
@@ -1,64 +1,71 @@
-'use strict';
-var common = require('../common');
+'use strict'
+
+const inherits = require('inherits')
+
+const { Readable, Writable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
-var Readable = require('../../lib/_stream_readable');
-var Writable = require('../../lib/_stream_writable');
-var inherits = require('inherits');
module.exports = function (t) {
- t.test('pipe after end', function (t) {
- t.plan(4);
- inherits(TestReadable, Readable);
- function TestReadable(opt) {
- if (!(this instanceof TestReadable))
- return new TestReadable(opt);
- Readable.call(this, opt);
- this._ended = false;
+ t.plan(4)
+
+ function TestReadable(opt) {
+ if (!(this instanceof TestReadable)) {
+ return new TestReadable(opt)
+ }
+
+ Readable.call(this, opt)
+ this._ended = false
+ }
+
+ inherits(TestReadable, Readable)
+
+ TestReadable.prototype._read = function (n) {
+ if (this._ended) {
+ this.emit('error', new Error('_read called twice'))
}
- TestReadable.prototype._read = function(n) {
- if (this._ended)
- this.emit('error', new Error('_read called twice'));
- this._ended = true;
- this.push(null);
- };
-
- inherits(TestWritable, Writable);
- function TestWritable(opt) {
- if (!(this instanceof TestWritable))
- return new TestWritable(opt);
- Writable.call(this, opt);
- this._written = [];
+ this._ended = true
+ this.push(null)
+ }
+
+ function TestWritable(opt) {
+ if (!(this instanceof TestWritable)) {
+ return new TestWritable(opt)
}
- TestWritable.prototype._write = function(chunk, encoding, cb) {
- this._written.push(chunk);
- cb();
- };
-
- // this one should not emit 'end' until we read() from it later.
- var ender = new TestReadable();
- var enderEnded = false;
-
- // what happens when you pipe() a Readable that's already ended?
- var piper = new TestReadable();
- // pushes EOF null, and length=0, so this will trigger 'end'
- piper.read();
-
- setTimeout(function() {
- ender.on('end', function() {
- enderEnded = true;
- t.ok(true, 'enderEnded');
- });
- t.notOk(enderEnded);
- var c = ender.read();
- t.equal(c, null);
-
- var w = new TestWritable();
- w.on('finish', function() {
- t.ok(true, 'writableFinished');
- });
- piper.pipe(w);
-
- });
- });
+ Writable.call(this, opt)
+ this._written = []
+ }
+
+ inherits(TestWritable, Writable)
+
+ TestWritable.prototype._write = function (chunk, encoding, cb) {
+ this._written.push(chunk)
+
+ cb()
+ } // this one should not emit 'end' until we read() from it later.
+
+ const ender = new TestReadable()
+ let enderEnded = false // what happens when you pipe() a Readable that's already ended?
+
+ const piper = new TestReadable() // pushes EOF null, and length=0, so this will trigger 'end'
+
+ piper.read()
+ setTimeout(function () {
+ ender.on('end', function () {
+ enderEnded = true
+ t.ok(true, 'enderEnded')
+ })
+ t.notOk(enderEnded)
+ const c = ender.read()
+ t.equal(c, null)
+ const w = new TestWritable()
+ w.on('finish', function () {
+ t.ok(true, 'writableFinished')
+ })
+ piper.pipe(w)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-after-end'
diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js
index 31fb97f7fa..907551ed44 100644
--- a/test/browser/test-stream-pipe-cleanup-pause.js
+++ b/test/browser/test-stream-pipe-cleanup-pause.js
@@ -1,42 +1,45 @@
-'use strict';
-var common = require('../common');
-var stream = require('../../');
+'use strict'
+
+const stream = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('pipe cleanup pause', function (t) {
- t.plan(3);
- var reader = new stream.Readable();
- var writer1 = new stream.Writable();
- var writer2 = new stream.Writable();
-
- // 560000 is chosen here because it is larger than the (default) highWaterMark
- // and will cause `.write()` to return false
- // See: https://github.com/nodejs/node/issues/2323
- var buffer = Buffer.alloc(560000);
-
- reader._read = function() {};
-
- writer1._write = common.mustCall(function(chunk, encoding, cb) {
- this.emit('chunk-received');
- cb();
- }, 1);
- writer1.once('chunk-received', function() {
- reader.unpipe(writer1);
- reader.pipe(writer2);
- reader.push(buffer);
- setImmediate(function() {
- reader.push(buffer);
- setImmediate(function() {
- reader.push(buffer);
- });
- });
- });
-
- writer2._write = function(chunk, encoding, cb) {
- t.ok(true);
- cb();
- };
-
- reader.pipe(writer1);
- reader.push(buffer);
- });
-};
+ t.plan(3)
+ const reader = new stream.Readable()
+ const writer1 = new stream.Writable()
+ const writer2 = new stream.Writable() // 560000 is chosen here because it is larger than the (default) highWaterMark
+ // and will cause `.write()` to return false
+ // See: https://github.com/nodejs/node/issues/2323
+
+ const buffer = Buffer.alloc(560000)
+
+ reader._read = function () {}
+
+ writer1._write = function (chunk, encoding, cb) {
+ this.emit('chunk-received')
+ cb()
+ }
+
+ writer1.on('chunk-received', function () {
+ reader.unpipe(writer1)
+ reader.pipe(writer2)
+ reader.push(buffer)
+ setImmediate(function () {
+ reader.push(buffer)
+ setImmediate(function () {
+ reader.push(buffer)
+ })
+ })
+ })
+
+ writer2._write = function (chunk, encoding, cb) {
+ t.ok(true)
+ cb()
+ }
+
+ reader.pipe(writer1)
+ reader.push(buffer)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup-pause'
diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js
index dd2b6d5269..ab8d981f6a 100644
--- a/test/browser/test-stream-pipe-cleanup.js
+++ b/test/browser/test-stream-pipe-cleanup.js
@@ -1,108 +1,126 @@
-'use strict';
-// This test asserts that Stream.prototype.pipe does not leave listeners
+'use strict' // This test asserts that Stream.prototype.pipe does not leave listeners
// hanging on the source or dest.
-var common = require('../common');
-var stream = require('../../');
-var inherits = require('inherits');
+const inherits = require('inherits')
+
+const { Stream } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('pipe cleanup', function (t) {
- if (/^v0\.8\./.test(process.version))
- return t.end();
-
- function Writable() {
- this.writable = true;
- this.endCalls = 0;
- require('stream').Stream.call(this);
- }
- inherits(Writable, require('stream').Stream);
- Writable.prototype.end = function() {
- this.endCalls++;
- };
-
- Writable.prototype.destroy = function() {
- this.endCalls++;
- };
-
- function Readable() {
- this.readable = true;
- require('stream').Stream.call(this);
- }
- inherits(Readable, require('stream').Stream);
-
- function Duplex() {
- this.readable = true;
- Writable.call(this);
- }
- inherits(Duplex, Writable);
-
- var i = 0;
- var limit = 100;
-
- var w = new Writable();
-
- var r;
-
- for (i = 0; i < limit; i++) {
- r = new Readable();
- r.pipe(w);
- r.emit('end');
- }
- t.equal(0, r.listeners('end').length);
- t.equal(limit, w.endCalls);
-
- w.endCalls = 0;
-
- for (i = 0; i < limit; i++) {
- r = new Readable();
- r.pipe(w);
- r.emit('close');
- }
- t.equal(0, r.listeners('close').length);
- t.equal(limit, w.endCalls);
-
- w.endCalls = 0;
-
- r = new Readable();
-
- for (i = 0; i < limit; i++) {
- w = new Writable();
- r.pipe(w);
- w.emit('close');
- }
- t.equal(0, w.listeners('close').length);
-
- r = new Readable();
- w = new Writable();
- var d = new Duplex();
- r.pipe(d); // pipeline A
- d.pipe(w); // pipeline B
- t.equal(r.listeners('end').length, 2); // A.onend, A.cleanup
- t.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup
- t.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
- t.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup
- t.equal(w.listeners('end').length, 0);
- t.equal(w.listeners('close').length, 1); // B.cleanup
-
- r.emit('end');
- t.equal(d.endCalls, 1);
- t.equal(w.endCalls, 0);
- t.equal(r.listeners('end').length, 0);
- t.equal(r.listeners('close').length, 0);
- t.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
- t.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup
- t.equal(w.listeners('end').length, 0);
- t.equal(w.listeners('close').length, 1); // B.cleanup
-
- d.emit('end');
- t.equal(d.endCalls, 1);
- t.equal(w.endCalls, 1);
- t.equal(r.listeners('end').length, 0);
- t.equal(r.listeners('close').length, 0);
- t.equal(d.listeners('end').length, 0);
- t.equal(d.listeners('close').length, 0);
- t.equal(w.listeners('end').length, 0);
- t.equal(w.listeners('close').length, 0);
- t.end();
- });
+ t.plan(27)
+
+ if (/^v0\.8\./.test(process.version)) {
+ return
+ }
+
+ function Writable() {
+ this.writable = true
+ this.endCalls = 0
+ Stream.call(this)
+ }
+
+ inherits(Writable, Stream)
+
+ Writable.prototype.end = function () {
+ this.endCalls++
+ }
+
+ Writable.prototype.destroy = function () {
+ this.endCalls++
+ }
+
+ function Readable() {
+ this.readable = true
+ Stream.call(this)
+ }
+
+ inherits(Readable, Stream)
+
+ Readable.prototype._read = function () {}
+
+ function Duplex() {
+ this.readable = true
+ Writable.call(this)
+ }
+
+ inherits(Duplex, Writable)
+
+ Duplex.prototype._read = function () {}
+
+ let i = 0
+ let r
+ let w = new Writable()
+ const limit = 100
+
+ for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('end')
+ }
+
+ t.equal(0, r.listeners('end').length)
+ t.equal(limit, w.endCalls)
+ w.endCalls = 0
+
+ for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('close')
+ }
+
+ t.equal(0, r.listeners('close').length)
+ t.equal(limit, w.endCalls)
+ w.endCalls = 0
+ r = new Readable()
+
+ for (i = 0; i < limit; i++) {
+ w = new Writable()
+ r.pipe(w)
+ w.emit('close')
+ }
+
+ t.equal(0, w.listeners('close').length)
+ r = new Readable()
+ w = new Writable()
+ const d = new Duplex()
+ r.pipe(d) // pipeline A
+
+ d.pipe(w) // pipeline B
+
+ t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup
+
+ t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup
+
+ t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
+
+ t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup
+
+ t.equal(w.listeners('end').length, 0)
+ t.equal(w.listeners('close').length, 1) // B.cleanup
+
+ r.emit('end')
+ t.equal(d.endCalls, 1)
+ t.equal(w.endCalls, 0)
+ t.equal(r.listeners('end').length, 0)
+ t.equal(r.listeners('close').length, 0)
+ t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
+
+ t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup
+
+ t.equal(w.listeners('end').length, 0)
+ t.equal(w.listeners('close').length, 1) // B.cleanup
+
+ d.emit('end')
+ t.equal(d.endCalls, 1)
+ t.equal(w.endCalls, 1)
+ t.equal(r.listeners('end').length, 0)
+ t.equal(r.listeners('close').length, 0)
+ t.equal(d.listeners('end').length, 0)
+ t.equal(d.listeners('close').length, 0)
+ t.equal(w.listeners('end').length, 0)
+ t.equal(w.listeners('close').length, 0)
+ d.end()
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup'
diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js
index 48a8bb375d..826ec775ae 100644
--- a/test/browser/test-stream-pipe-error-handling.js
+++ b/test/browser/test-stream-pipe-error-handling.js
@@ -1,102 +1,104 @@
-'use strict';
-var common = require('../common');
-var Stream = require('stream').Stream;
+'use strict'
-module.exports = function (t) {
- t.test('Error Listener Catches', function (t) {
- t.plan(1);
- var source = new Stream();
- var dest = new Stream();
+const { Readable, Writable, Stream } = require('../../lib/ours/index')
- source.pipe(dest);
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
- var gotErr = null;
- source.on('error', function(err) {
- gotErr = err;
- });
+module.exports = function (test) {
+ test('Error Listener Catches', function (t) {
+ t.plan(1)
+ const source = new Stream()
+ const dest = new Stream()
- var err = new Error('This stream turned into bacon.');
- source.emit('error', err);
- t.strictEqual(gotErr, err);
- });
+ source._read = function () {}
- t.test('Error WithoutListener Throws', function (t) {
- t.plan(1);
- var source = new Stream();
- var dest = new Stream();
+ source.pipe(dest)
+ let gotErr = null
+ source.on('error', function (err) {
+ gotErr = err
+ })
+ const err = new Error('This stream turned into bacon.')
+ source.emit('error', err)
+ t.strictEqual(gotErr, err)
+ })
+ test('Error WithoutListener Throws', function (t) {
+ t.plan(1)
+ const source = new Stream()
+ const dest = new Stream()
- source.pipe(dest);
+ source._read = function () {}
- var err = new Error('This stream turned into bacon.');
+ source.pipe(dest)
+ const err = new Error('This stream turned into bacon.')
+ let gotErr = null
- var gotErr = null;
try {
- source.emit('error', err);
+ source.emit('error', err)
} catch (e) {
- gotErr = e;
+ gotErr = e
}
- t.strictEqual(gotErr, err);
- });
-
- t.test('Error With Removed Listener Throws', function (t) {
- t.plan(2);
- var EE = require('events').EventEmitter;
- var R = require('../../').Readable;
- var W = require('../../').Writable;
-
- var r = new R();
- var w = new W();
- var removed = false;
-
- r._read = function() {
- setTimeout(function() {
- t.ok(removed);
- t.throws(function() {
- w.emit('error', new Error('fail'));
- });
- });
- };
-
- w.on('error', myOnError);
- r.pipe(w);
- w.removeListener('error', myOnError);
- removed = true;
+ t.strictEqual(gotErr, err)
+ })
+ test('Error With Removed Listener Throws', function (t) {
+ t.plan(2)
+ const onerror = global.onerror
+ const r = new Readable()
+ const w = new Writable()
+ let removed = false
+ let caught = false
+
+ global.onerror = () => {
+ t.notOk(caught)
+ global.onerror = onerror
+ return true
+ }
+
+ r._read = function () {
+ setTimeout(function () {
+ t.ok(removed)
+ w.emit('error', new Error('fail'))
+ })
+ }
+
+ w.on('error', myOnError)
+ r.pipe(w)
+ w.removeListener('error', myOnError)
+ removed = true
function myOnError(er) {
- throw new Error('this should not happen');
+ caught = true
+ }
+ })
+ test('Error Listener Catches When Wrong Listener Is Removed', function (t) {
+ t.plan(2)
+ const r = new Readable()
+ const w = new Writable()
+ let removed = false
+ let caught = false
+
+ r._read = function () {
+ setTimeout(function () {
+ t.ok(removed)
+ w.emit('error', new Error('fail'))
+ })
}
- });
-
- t.test('Error With Removed Listener Throws', function (t) {
- t.plan(2);
- var EE = require('events').EventEmitter;
- var R = require('../../').Readable;
- var W = require('../../').Writable;
-
- var r = new R();
- var w = new W();
- var removed = false;
- var caught = false;
-
- r._read = function() {
- setTimeout(function() {
- t.ok(removed);
- w.emit('error', new Error('fail'));
- });
- };
-
- w.on('error', myOnError);
- w._write = function() {};
-
- r.pipe(w);
- // Removing some OTHER random listener should not do anything
- w.removeListener('error', function() {});
- removed = true;
+
+ w.on('error', myOnError)
+
+ w._write = function () {}
+
+ r.pipe(w) // Removing some OTHER random listener should not do anything
+
+ w.removeListener('error', function () {})
+ removed = true
function myOnError(er) {
- t.notOk(caught);
- caught = true;
+ t.notOk(caught)
+ caught = true
}
- });
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js
index c0d7a606c2..4ec67cecc2 100644
--- a/test/browser/test-stream-pipe-event.js
+++ b/test/browser/test-stream-pipe-event.js
@@ -1,32 +1,38 @@
-'use strict';
-var common = require('../common');
-var stream = require('../../');
-var inherits = require('inherits');
+'use strict'
+
+const inherits = require('inherits')
+
+const { Stream } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('pipe event', function (t) {
- t.plan(1);
- function Writable() {
- this.writable = true;
- require('stream').Stream.call(this);
- }
- inherits(Writable, require('stream').Stream);
-
- function Readable() {
- this.readable = true;
- require('stream').Stream.call(this);
- }
- inherits(Readable, require('stream').Stream);
-
- var passed = false;
-
- var w = new Writable();
- w.on('pipe', function(src) {
- passed = true;
- });
-
- var r = new Readable();
- r.pipe(w);
-
- t.ok(passed);
- });
+ t.plan(1)
+
+ function Writable() {
+ this.writable = true
+ Stream.call(this)
+ }
+
+ inherits(Writable, Stream)
+
+ function Readable() {
+ this.readable = true
+ Stream.call(this)
+ }
+
+ inherits(Readable, Stream)
+ let passed = false
+ const w = new Writable()
+ w.on('pipe', function (src) {
+ passed = true
+ })
+ const r = new Readable()
+
+ r._read = function () {}
+
+ r.pipe(w)
+ t.ok(passed)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-event'
diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js
index 742e2d233d..ecd414a2f4 100644
--- a/test/browser/test-stream-pipe-without-listenerCount.js
+++ b/test/browser/test-stream-pipe-without-listenerCount.js
@@ -1,27 +1,20 @@
-'use strict';
-var Stream = require('../../');
+'use strict'
-module.exports = function (t) {
- t.tets('pipe without listenerCount', function (t) {
- t.plan(2);
- var r = new Stream({
- read: function (){}});
- r.listenerCount = undefined;
-
- var w = new Stream();
- w.listenerCount = undefined;
+const { Stream } = require('../../lib/ours/index')
- w.on('pipe', function() {
- r.emit('error', new Error('Readable Error'));
- w.emit('error', new Error('Writable Error'));
- });
- r.on('error', function (e) {
- t.ok(e, 'readable error');
- });
- w.on('error', function (e) {
- t.ok(e, 'writable error');
- });
- r.pipe(w);
+const { kReadableStreamSuiteName } = require('./symbols')
- });
+module.exports = function (t) {
+ t.plan(1)
+ const r = new Stream({
+ read: function () {}
+ })
+ r.listenerCount = undefined
+ const w = new Stream()
+ w.on('pipe', function () {
+ r.emit('error', new Error('Readable Error'))
+ })
+ t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function')
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-without-listenerCount'
diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js
index 833d58887c..f1e2c18b33 100644
--- a/test/browser/test-stream-pipeline.js
+++ b/test/browser/test-stream-pipeline.js
@@ -1,112 +1,102 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-var common = require('../common');
-
-var _require = require('../../'),
- Writable = _require.Writable,
- Readable = _require.Readable,
- Transform = _require.Transform,
- finished = _require.finished,
- pipeline = _require.pipeline;
-
-module.exports = function (t) {
- t.test('pipeline', function (t) {
- var finished = false;
- var processed = [];
- var expected = [bufferShim.from('a'), bufferShim.from('b'),
- bufferShim.from('c')];
- var read = new Readable({
- read: function read() {
- }
- });
- var write = new Writable({
+'use strict'
+
+const { Readable, Writable, pipeline } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('pipeline', function (t) {
+ t.plan(3)
+ let finished = false
+ const processed = []
+ const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]
+ const read = new Readable({
+ read: function read() {}
+ })
+ const write = new Writable({
write: function write(data, enc, cb) {
- processed.push(data);
- cb();
+ processed.push(data)
+ cb()
}
- });
+ })
write.on('finish', function () {
- finished = true;
- });
+ finished = true
+ })
- for (var i = 0; i < expected.length; i++) {
- read.push(expected[i]);
+ for (let i = 0; i < expected.length; i++) {
+ read.push(expected[i])
}
- read.push(null);
- pipeline(read, write, common.mustCall(function (err) {
- t.ok(!err, 'no error');
- t.ok(finished);
- t.deepEqual(processed, expected);
- t.end();
- }));
- });
- t.test('pipeline missing args', function (t) {
- var _read = new Readable({
- read: function read() {
- }
- });
+ read.push(null)
+ pipeline(read, write, (err) => {
+ t.ifErr(err)
+ t.ok(finished)
+ t.deepEqual(processed, expected)
+ })
+ })
+ test('pipeline missing args', function (t) {
+ t.plan(3)
+
+ const _read = new Readable({
+ read: function read() {}
+ })
t.throws(function () {
- pipeline(_read, function () {
- });
- });
+ pipeline(_read, function () {})
+ })
t.throws(function () {
- pipeline(function () {
- });
- });
+ pipeline(function () {})
+ })
t.throws(function () {
- pipeline();
- });
- t.end();
- });
- t.test('pipeline error', function (t) {
- var _read2 = new Readable({
- read: function read() {
- }
- });
+ pipeline()
+ })
+ })
+ test('pipeline error', function (t) {
+ t.plan(1)
+
+ const _read2 = new Readable({
+ read: function read() {}
+ })
- var _write = new Writable({
+ const _write = new Writable({
write: function write(data, enc, cb) {
- cb();
+ cb()
}
- });
+ })
- _read2.push('data');
+ _read2.push('data')
setImmediate(function () {
- return _read2.destroy();
- });
- pipeline(_read2, _write, common.mustCall(function (err) {
- t.ok(err, 'should have an error');
- t.end();
- }));
- });
- t.test('pipeline destroy', function () {
- var _read3 = new Readable({
- read: function read() {
- }
- });
+ return _read2.destroy()
+ })
+ pipeline(_read2, _write, (err) => {
+ t.equal(err.message, 'Premature close')
+ })
+ })
+ test('pipeline destroy', function (t) {
+ t.plan(2)
- var _write2 = new Writable({
+ const _read3 = new Readable({
+ read: function read() {}
+ })
+
+ const _write2 = new Writable({
write: function write(data, enc, cb) {
- cb();
+ cb()
}
- });
+ })
- _read3.push('data');
+ _read3.push('data')
setImmediate(function () {
- return _read3.destroy(new Error('kaboom'));
- });
- var dst = pipeline(_read3, _write2, common.mustCall(function (err) {
- t.equal(err.message, 'kaboom');
- t.end();
- }));
- t.equal(dst, _write2);
- });
-};
+ return _read3.destroy(new Error('kaboom'))
+ })
+ const dst = pipeline(_read3, _write2, (err) => {
+ t.equal(err.message, 'kaboom')
+ })
+ t.equal(dst, _write2)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipeline'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js
index 22fe17d1b0..430182c3c9 100644
--- a/test/browser/test-stream-push-order.js
+++ b/test/browser/test-stream-push-order.js
@@ -1,33 +1,33 @@
-'use strict';
-var common = require('../common');
-var Readable = require('../../').Readable;
-module.exports = function (t) {
- t.test('push order', function (t) {
- t.plan(1);
- var s = new Readable({
- highWaterMark: 20,
- encoding: 'ascii'
- });
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
- var list = ['1', '2', '3', '4', '5', '6'];
+const { kReadableStreamSuiteName } = require('./symbols')
- s._read = function(n) {
- var one = list.shift();
- if (!one) {
- s.push(null);
- } else {
- var two = list.shift();
- s.push(one);
- s.push(two);
- }
- };
+module.exports = function (t) {
+ t.plan(1)
+ const s = new Readable({
+ highWaterMark: 20,
+ encoding: 'ascii'
+ })
+ const list = ['1', '2', '3', '4', '5', '6']
- var v = s.read(0);
+ s._read = function (n) {
+ const one = list.shift()
- // ACTUALLY [1, 3, 5, 6, 4, 2]
+ if (!one) {
+ s.push(null)
+ } else {
+ const two = list.shift()
+ s.push(one)
+ s.push(two)
+ }
+ }
- setTimeout(function() {
- t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6');
- });
- });
+ s.read(0)
+ setTimeout(function () {
+ t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6')
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-push-order'
diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js
index 1de240efd8..fb20f3cd39 100644
--- a/test/browser/test-stream-push-strings.js
+++ b/test/browser/test-stream-push-strings.js
@@ -1,49 +1,63 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var Readable = require('../../').Readable;
-var inherits = require('inherits');
+const inherits = require('inherits')
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
- t.test('push strings', function (t) {
- t.plan(2);
- inherits(MyStream, Readable);
- function MyStream(options) {
- Readable.call(this, options);
- this._chunks = 3;
+ t.plan(2)
+
+ function MyStream(options) {
+ Readable.call(this, options)
+ this._chunks = 3
+ }
+
+ inherits(MyStream, Readable)
+
+ MyStream.prototype._read = function (n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null)
+
+ case 1:
+ return setTimeout(
+ function () {
+ this.push('last chunk')
+ }.bind(this),
+ 100
+ )
+
+ case 2:
+ return this.push('second to last chunk')
+
+ case 3:
+ return process.nextTick(
+ function () {
+ this.push('first chunk')
+ }.bind(this)
+ )
+
+ default:
+ throw new Error('?')
}
+ }
- MyStream.prototype._read = function(n) {
- switch (this._chunks--) {
- case 0:
- return this.push(null);
- case 1:
- return setTimeout(function() {
- this.push('last chunk');
- }.bind(this), 100);
- case 2:
- return this.push('second to last chunk');
- case 3:
- return process.nextTick(function() {
- this.push('first chunk');
- }.bind(this));
- default:
- throw new Error('?');
- }
- };
- var expect = [ 'first chunksecond to last chunk', 'last chunk' ];
-
- var ms = new MyStream();
- var results = [];
- ms.on('readable', function() {
- var chunk;
- while (null !== (chunk = ms.read()))
- results.push(chunk + '');
- });
-
- ms.on('end', function() {
- t.equal(ms._chunks, -1);
- t.deepEqual(results, expect);
- });
- });
+ const expect = ['first chunksecond to last chunk', 'last chunk']
+ const ms = new MyStream()
+ const results = []
+ ms.on('readable', function () {
+ let chunk
+
+ while ((chunk = ms.read()) !== null) {
+ results.push(chunk + '')
+ }
+ })
+ ms.on('end', function () {
+ t.equal(ms._chunks, -1)
+ t.deepEqual(results, expect)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-push-strings'
diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js
index fa0d59ba28..8461661d9d 100644
--- a/test/browser/test-stream-readable-constructor-set-methods.js
+++ b/test/browser/test-stream-readable-constructor-set-methods.js
@@ -1,22 +1,26 @@
-'use strict';
-var common = require('../common');
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
-var Readable = require('../../').Readable;
module.exports = function (t) {
- t.test('readable constructor set methods', function (t) {
- t.plan(2);
- var _readCalled = false;
- function _read(n) {
- _readCalled = true;
- this.push(null);
- }
+ t.plan(2)
+ let _readCalled = false
- var r = new Readable({ read: _read });
- r.resume();
+ function _read(n) {
+ _readCalled = true
+ this.push(null)
+ }
- setTimeout(function() {
- t.equal(r._read, _read);
- t.ok(_readCalled);
- });
- });
+ const r = new Readable({
+ read: _read
+ })
+ r.resume()
+ setTimeout(function () {
+ t.equal(r._read, _read)
+ t.ok(_readCalled)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-readable-constructor-set-methods'
diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js
index 474168aad4..8784698a30 100644
--- a/test/browser/test-stream-readable-event.js
+++ b/test/browser/test-stream-readable-event.js
@@ -1,114 +1,96 @@
-'use strict';
-var common = require('../common');
-
-var Readable = require('../../').Readable;
-
-function first(t) {
- // First test, not reading when the readable is added.
- // make sure that on('readable', ...) triggers a readable event.
- var r = new Readable({
- highWaterMark: 3
- });
-
- var _readCalled = false;
- r._read = function(n) {
- _readCalled = true;
- };
-
- // This triggers a 'readable' event, which is lost.
- r.push(Buffer.from('blerg'));
-
- var caughtReadable = false;
- setTimeout(function() {
- // we're testing what we think we are
- t.notOk(r._readableState.reading);
- r.on('readable', function() {
- caughtReadable = true;
- setTimeout(function() {
- // we're testing what we think we are
- t.notOk(_readCalled);
-
- t.ok(caughtReadable);
- t.end();
- });
- });
- });
-
-
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('readable events - first', (t) => {
+ t.plan(3) // First test, not reading when the readable is added.
+ // make sure that on('readable', ...) triggers a readable event.
+
+ const r = new Readable({
+ highWaterMark: 3
+ })
+ let _readCalled = false
+
+ r._read = function (n) {
+ _readCalled = true
+ } // This triggers a 'readable' event, which is lost.
+
+ r.push(Buffer.from('blerg'))
+ let caughtReadable = false
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(_readCalled)
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+ test('readable events - second', (t) => {
+ t.plan(3) // second test, make sure that readable is re-emitted if there's
+ // already a length, while it IS reading.
+
+ const r = new Readable({
+ highWaterMark: 3
+ })
+ let _readCalled = false
+
+ r._read = function (n) {
+ _readCalled = true
+ } // This triggers a 'readable' event, which is lost.
+
+ r.push(Buffer.from('bl'))
+ let caughtReadable = false
+ setTimeout(function () {
+ // assert we're testing what we think we are
+ t.ok(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.ok(_readCalled)
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+ test('readable events - third', (t) => {
+ t.plan(3) // Third test, not reading when the stream has not passed
+ // the highWaterMark but *has* reached EOF.
+
+ const r = new Readable({
+ highWaterMark: 30
+ })
+ let _readCalled = false
+
+ r._read = function (n) {
+ _readCalled = true
+ } // This triggers a 'readable' event, which is lost.
+
+ r.push(Buffer.from('blerg'))
+ r.push(null)
+ let caughtReadable = false
+ setTimeout(function () {
+ // assert we're testing what we think we are
+ t.notOk(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(_readCalled)
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
}
-function second(t) {
- // second test, make sure that readable is re-emitted if there's
- // already a length, while it IS reading.
-
- var r = new Readable({
- highWaterMark: 3
- });
-
- var _readCalled = false;
- r._read = function(n) {
- _readCalled = true;
- };
-
- // This triggers a 'readable' event, which is lost.
- r.push(Buffer.from('bl'));
-
- var caughtReadable = false;
- setTimeout(function() {
- // assert we're testing what we think we are
- t.ok(r._readableState.reading);
- r.on('readable', function() {
- caughtReadable = true;
- setTimeout(function() {
- // we're testing what we think we are
- t.ok(_readCalled);
-
- t.ok(caughtReadable);
- t.end();
- });
- });
- });
-
-}
-
-function third(t) {
- // Third test, not reading when the stream has not passed
- // the highWaterMark but *has* reached EOF.
- var r = new Readable({
- highWaterMark: 30
- });
-
- var _readCalled = false;
- r._read = function(n) {
- _readCalled = true;
- };
-
- // This triggers a 'readable' event, which is lost.
- r.push(Buffer.from('blerg'));
- r.push(null);
-
- var caughtReadable = false;
- setTimeout(function() {
- // assert we're testing what we think we are
- t.notOk(r._readableState.reading);
- r.on('readable', function() {
- caughtReadable = true;
- setTimeout(function() {
- // we're testing what we think we are
- t.notOk(_readCalled);
-
- t.ok(caughtReadable);
- t.end();
- });
- });
- });
-
-};
-
-module.exports = function (t) {
- t.test('readable events', function (t) {
- t.test('first', first);
- t.test('second', second);
- t.test('third', third);
- });
-}
+module.exports[kReadableStreamSuiteName] = 'stream-readable-event'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js
index c07c1672d2..2ab9b4e512 100644
--- a/test/browser/test-stream-sync-write.js
+++ b/test/browser/test-stream-sync-write.js
@@ -1,39 +1,52 @@
-require('../common');
-var inherits = require('inherits');
-var stream = require('../../');
-var WritableStream = stream.Writable;
-module.exports = function(t) {
- t.test('should bea ble to write sync', function(t) {
- var InternalStream = function() {
- WritableStream.call(this);
- };
- inherits(InternalStream, WritableStream);
+'use strict'
- InternalStream.prototype._write = function(chunk, encoding, callback) {
- callback();
- };
+const inherits = require('inherits')
- var internalStream = new InternalStream();
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(2)
+ let internalCalls = 0
+ let externalCalls = 0
- var ExternalStream = function(writable) {
- this._writable = writable;
- WritableStream.call(this);
- };
- inherits(ExternalStream, WritableStream);
+ const InternalStream = function () {
+ Writable.call(this)
+ }
- ExternalStream.prototype._write = function(chunk, encoding, callback) {
- this._writable.write(chunk, encoding, callback);
- };
+ inherits(InternalStream, Writable)
+ InternalStream.prototype._write = function (chunk, encoding, callback) {
+ internalCalls++
+ callback()
+ }
+ const internalStream = new InternalStream()
- var externalStream = new ExternalStream(internalStream);
+ const ExternalStream = function (writable) {
+ this._writable = writable
+ Writable.call(this)
+ }
- for (var i = 0; i < 2000; i++) {
- externalStream.write(i.toString());
- }
- t.end();
- });
+ inherits(ExternalStream, Writable)
+
+ ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ externalCalls++
+
+ this._writable.write(chunk, encoding, callback)
+ }
+
+ const externalStream = new ExternalStream(internalStream)
+
+ for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString())
+ }
+
+ externalStream.end(() => {
+ t.equal(internalCalls, 2000)
+ t.equal(externalCalls, 2000)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-sync-write'
diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js
index c820f8cac7..257224c122 100644
--- a/test/browser/test-stream-transform-constructor-set-methods.js
+++ b/test/browser/test-stream-transform-constructor-set-methods.js
@@ -1,35 +1,37 @@
-'use strict';
-var common = require('../common');
+'use strict'
+
+const { Transform } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
-var Transform = require('../../').Transform;
module.exports = function (t) {
- t.test('transform constructor set methods', function (t) {
- var _transformCalled = false;
- function _transform(d, e, n) {
- _transformCalled = true;
- n();
- }
-
- var _flushCalled = false;
- function _flush(n) {
- _flushCalled = true;
- n();
- }
-
- var tr = new Transform({
- transform: _transform,
- flush: _flush
- });
-
- tr.end(Buffer.from('blerg'));
- tr.resume();
-
- tr.on('end', function() {
- t.equal(tr._transform, _transform);
- t.equal(tr._flush, _flush);
- t.ok(_transformCalled);
- t.ok(_flushCalled);
- t.end();
- });
- });
+ t.plan(4)
+ let _transformCalled = false
+
+ function _transform(d, e, n) {
+ _transformCalled = true
+ n()
+ }
+
+ let _flushCalled = false
+
+ function _flush(n) {
+ _flushCalled = true
+ n()
+ }
+
+ const tr = new Transform({
+ transform: _transform,
+ flush: _flush
+ })
+ tr.end(Buffer.from('blerg'))
+ tr.resume()
+ tr.on('end', function () {
+ t.equal(tr._transform, _transform)
+ t.equal(tr._flush, _flush)
+ t.ok(_transformCalled)
+ t.ok(_flushCalled)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-constructor-set-methods'
diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js
index 3b226a7c26..69a9876497 100644
--- a/test/browser/test-stream-transform-objectmode-falsey-value.js
+++ b/test/browser/test-stream-transform-objectmode-falsey-value.js
@@ -1,36 +1,39 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var stream = require('../../');
-var PassThrough = stream.PassThrough;
-module.exports = function (t) {
- t.test('transform objectmode falsey value', function (t) {
- var src = new PassThrough({ objectMode: true });
- var tx = new PassThrough({ objectMode: true });
- var dest = new PassThrough({ objectMode: true });
-
- var expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ];
- var results = [];
- dest.on('end', function() {
- t.deepEqual(results, expect);
- t.end();
- });
-
- dest.on('data', function(x) {
- results.push(x);
- });
+const { PassThrough } = require('../../lib/ours/index')
- src.pipe(tx).pipe(dest);
+const { kReadableStreamSuiteName } = require('./symbols')
- var i = -1;
- var int = setInterval(function() {
- if (i > 10) {
- src.end();
- clearInterval(int);
- } else {
- t.ok(true);
- src.write(i++);
- }
- }, 10);
- });
+module.exports = function (t) {
+ t.plan(13)
+ const src = new PassThrough({
+ objectMode: true
+ })
+ const tx = new PassThrough({
+ objectMode: true
+ })
+ const dest = new PassThrough({
+ objectMode: true
+ })
+ const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ const results = []
+ dest.on('end', function () {
+ t.deepEqual(results, expect)
+ })
+ dest.on('data', function (x) {
+ results.push(x)
+ })
+ src.pipe(tx).pipe(dest)
+ let i = -1
+ const int = setInterval(function () {
+ if (i > 10) {
+ src.end()
+ clearInterval(int)
+ } else {
+ t.ok(true)
+ src.write(i++)
+ }
+ }, 10)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-objectmode-falsey-value'
diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js
index 51e7ff649a..551dfcbe8b 100644
--- a/test/browser/test-stream-transform-split-objectmode.js
+++ b/test/browser/test-stream-transform-split-objectmode.js
@@ -1,58 +1,58 @@
-'use strict';
-var common = require('../common');
-
-var Transform = require('../../').Transform;
-module.exports = function (t) {
- t.test('transform split objectmode', function (t) {
- t.plan(10);
- var parser = new Transform({ readableObjectMode : true });
-
- t.ok(parser._readableState.objectMode, 'parser 1');
- t.notOk(parser._writableState.objectMode, 'parser 2');
- t.equals(parser._readableState.highWaterMark, 16, 'parser 3');
- t.equals(parser._writableState.highWaterMark, (16 * 1024), 'parser 4');
-
- parser._transform = function(chunk, enc, callback) {
- callback(null, { val : chunk[0] });
- };
-
- var parsed;
-
- parser.on('data', function(obj) {
- parsed = obj;
- });
-
- parser.end(Buffer.from([42]));
-
- parser.on('end', function() {
- t.equals(parsed.val, 42, 'parser ended');
- });
+'use strict'
+const { Transform } = require('../../lib/ours/index')
- var serializer = new Transform({ writableObjectMode : true });
+const { kReadableStreamSuiteName } = require('./symbols')
- t.notOk(serializer._readableState.objectMode, 'serializer 1');
- t.ok(serializer._writableState.objectMode, 'serializer 2');
- t.equals(serializer._readableState.highWaterMark, (16 * 1024), 'serializer 3');
- t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4');
-
- serializer._transform = function(obj, _, callback) {
- callback(null, Buffer.from([obj.val]));
- };
-
- var serialized;
-
- serializer.on('data', function(chunk) {
- serialized = chunk;
- });
-
- serializer.write({ val : 42 });
-
- serializer.on('end', function() {
- t.equals(serialized[0], 42, 'searlizer ended');
- });
- setImmediate(function () {
- serializer.end();
- });
- });
+module.exports = function (t) {
+ t.plan(10)
+ const parser = new Transform({
+ readableObjectMode: true
+ })
+ t.ok(parser._readableState.objectMode, 'parser 1')
+ t.notOk(parser._writableState.objectMode, 'parser 2')
+ t.equals(parser._readableState.highWaterMark, 16, 'parser 3')
+ t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4')
+
+ parser._transform = function (chunk, enc, callback) {
+ callback(null, {
+ val: chunk[0]
+ })
+ }
+
+ let parsed
+ parser.on('data', function (obj) {
+ parsed = obj
+ })
+ parser.end(Buffer.from([42]))
+ parser.on('end', function () {
+ t.equals(parsed.val, 42, 'parser ended')
+ })
+ const serializer = new Transform({
+ writableObjectMode: true
+ })
+ t.notOk(serializer._readableState.objectMode, 'serializer 1')
+ t.ok(serializer._writableState.objectMode, 'serializer 2')
+ t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3')
+ t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4')
+
+ serializer._transform = function (obj, _, callback) {
+ callback(null, Buffer.from([obj.val]))
+ }
+
+ let serialized
+ serializer.on('data', function (chunk) {
+ serialized = chunk
+ })
+ serializer.write({
+ val: 42
+ })
+ serializer.on('end', function () {
+ t.equals(serialized[0], 42, 'searlizer ended')
+ })
+ setImmediate(function () {
+ serializer.end()
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-split-objectmode'
diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js
index 61f9db83a0..e0494e2554 100644
--- a/test/browser/test-stream-unshift-empty-chunk.js
+++ b/test/browser/test-stream-unshift-empty-chunk.js
@@ -1,63 +1,63 @@
-'use strict';
-var common = require('../common');
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
-// This test verifies that stream.unshift(Buffer(0)) or
-// stream.unshift('') does not set state.reading=false.
-var Readable = require('../../').Readable;
module.exports = function (t) {
- t.test('unshift empty chunk', function (t) {
- t.plan(1);
- var r = new Readable();
- var nChunks = 10;
- var chunk = Buffer.alloc(10);
- chunk.fill('x');
-
- r._read = function(n) {
- setTimeout(function() {
- r.push(--nChunks === 0 ? null : chunk);
- });
- };
-
- var readAll = false;
- var seen = [];
- r.on('readable', function() {
- var chunk;
- while (chunk = r.read()) {
- seen.push(chunk.toString());
- // simulate only reading a certain amount of the data,
- // and then putting the rest of the chunk back into the
- // stream, like a parser might do. We just fill it with
- // 'y' so that it's easy to see which bits were touched,
- // and which were not.
- var putBack = Buffer.alloc(readAll ? 0 : 5);
- putBack.fill('y');
- readAll = !readAll;
- r.unshift(putBack);
- }
- });
-
- var expect =
- [ 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy' ];
-
- r.on('end', function() {
- t.deepEqual(seen, expect);
- });
- });
+ t.plan(1)
+ const r = new Readable()
+ let nChunks = 10
+ const chunk = Buffer.alloc(10)
+ chunk.fill('x')
+
+ r._read = function (n) {
+ setTimeout(function () {
+ r.push(--nChunks === 0 ? null : chunk)
+ })
+ }
+
+ let readAll = false
+ const seen = []
+ r.on('readable', function () {
+ let chunk
+
+ while ((chunk = r.read())) {
+ seen.push(chunk.toString()) // simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+
+ const putBack = Buffer.alloc(readAll ? 0 : 5)
+ putBack.fill('y')
+ readAll = !readAll
+ r.unshift(putBack)
+ }
+ })
+ const expect = [
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy'
+ ]
+ r.on('end', function () {
+ t.deepEqual(seen, expect)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-empty-chunk'
diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js
index 16a3966bec..3e68b3cfdd 100644
--- a/test/browser/test-stream-unshift-read-race.js
+++ b/test/browser/test-stream-unshift-read-race.js
@@ -1,110 +1,132 @@
-'use strict';
-var common = require('../common');
-
-// This test verifies that:
+'use strict' // This test verifies that:
// 1. unshift() does not cause colliding _read() calls.
// 2. unshift() after the 'end' event is an error, but after the EOF
// signalling null, it is ok, and just creates a new readable chunk.
// 3. push() after the EOF signaling null is an error.
// 4. _read() is not called after pushing the EOF null chunk.
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('unshift read race', function (tape) {
- var hwm = 10;
- var r = stream.Readable({ highWaterMark: hwm });
- var chunks = 10;
- var t = (chunks * 5);
-
- var data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2));
- for (var i = 0; i < data.length; i++) {
- var c = 'asdf'.charCodeAt(i % 4);
- data[i] = c;
- }
+ t.plan(139)
+ const hwm = 10
+ const r = stream.Readable({
+ highWaterMark: hwm
+ })
+ const chunks = 10
+ const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2))
+
+ for (let i = 0; i < data.length; i++) {
+ const c = 'asdf'.charCodeAt(i % 4)
+ data[i] = c
+ }
+
+ let pos = 0
+ let pushedNull = false
+
+ r._read = function (n) {
+ t.notOk(pushedNull, '_read after null push') // every third chunk is fast
- var pos = 0;
- var pushedNull = false;
- r._read = function(n) {
- tape.notOk(pushedNull, '_read after null push');
-
- // every third chunk is fast
- push(!(chunks % 3));
-
- function push(fast) {
- tape.notOk(pushedNull, 'push() after null push');
- var c = pos >= data.length ? null : data.slice(pos, Math.min(pos + n, data.length));
- pushedNull = c === null;
- if (fast) {
- pos += n;
- r.push(c);
- if (c === null) pushError();
- } else {
- setTimeout(function() {
- pos += n;
- r.push(c);
- if (c === null) pushError();
- });
+ push(!(chunks % 3))
+
+ function push(fast) {
+ t.notOk(pushedNull, 'push() after null push')
+ const c = pos >= data.length ? null : data.slice(pos, pos + n)
+ pushedNull = c === null
+
+ if (fast) {
+ pos += n
+ r.push(c)
+
+ if (c === null) {
+ pushError()
}
+ } else {
+ setTimeout(function () {
+ pos += n
+ r.push(c)
+
+ if (c === null) {
+ pushError()
+ }
+ }, 1)
}
- };
+ }
+ }
+
+ function pushError() {
+ r.unshift(Buffer.allocUnsafe(1))
+ w.end()
+ const onerror = global.onerror
- function pushError() {
- tape.throws(function() {
- r.push(Buffer.alloc(1));
- });
+ global.onerror = () => {
+ t.ok(true)
+ global.onerror = onerror
+ return true
}
+ r.push(Buffer.allocUnsafe(1))
+ }
- var w = stream.Writable();
- var written = [];
- w._write = function(chunk, encoding, cb) {
- written.push(chunk.toString());
- cb();
- };
-
- var ended = false;
- r.on('end', function() {
- tape.notOk(ended, 'end emitted more than once');
- tape.throws(function() {
- r.unshift(Buffer.alloc(1));
- });
- ended = true;
- w.end();
- });
-
- r.on('readable', function() {
- var chunk;
- while (null !== (chunk = r.read(10))) {
- w.write(chunk);
- if (chunk.length > 4)
- r.unshift(Buffer.from('1234'));
+ const w = stream.Writable()
+ const written = []
+
+ w._write = function (chunk, encoding, cb) {
+ written.push(chunk.toString())
+ cb()
+ }
+
+ r.on('end', t.fail)
+ r.on('readable', function () {
+ let chunk
+
+ while ((chunk = r.read(10)) !== null) {
+ w.write(chunk)
+
+ if (chunk.length > 4) {
+ r.unshift(Buffer.from('1234'))
}
- });
-
- w.on('finish', function() {
- // each chunk should start with 1234, and then be asfdasdfasdf...
- // The first got pulled out before the first unshift('1234'), so it's
- // lacking that piece.
- tape.equal(written[0], 'asdfasdfas');
- var asdf = 'd';
- //console.error('0: %s', written[0]);
- for (var i = 1; i < written.length; i++) {
- //console.error('%s: %s', i.toString(32), written[i]);
- tape.equal(written[i].slice(0, 4), '1234');
- for (var j = 4; j < written[i].length; j++) {
- var c = written[i].charAt(j);
- tape.equal(c, asdf);
- switch (asdf) {
- case 'a': asdf = 's'; break;
- case 's': asdf = 'd'; break;
- case 'd': asdf = 'f'; break;
- case 'f': asdf = 'a'; break;
- }
+ }
+ })
+ w.on('finish', function () {
+ // each chunk should start with 1234, and then be asfdasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ t.equal(written[0], 'asdfasdfas')
+ let asdf = 'd' // console.error('0: %s', written[0]);
+
+ for (let i = 1; i < written.length; i++) {
+ // console.error('%s: %s', i.toString(32), written[i]);
+ t.equal(written[i].slice(0, 4), '1234')
+
+ for (let j = 4; j < written[i].length; j++) {
+ const c = written[i].charAt(j)
+ t.equal(c, asdf)
+
+ switch (asdf) {
+ case 'a':
+ asdf = 's'
+ break
+
+ case 's':
+ asdf = 'd'
+ break
+
+ case 'd':
+ asdf = 'f'
+ break
+
+ case 'f':
+ asdf = 'a'
+ break
}
}
- tape.equal(written.length, 18);
- tape.end();
- });
+ }
- });
+ t.equal(written.length, 18)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-read-race'
diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js
index de657152af..54426e56d3 100644
--- a/test/browser/test-stream-writable-change-default-encoding.js
+++ b/test/browser/test-stream-writable-change-default-encoding.js
@@ -1,64 +1,77 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var stream = require('../../');
-var inherits = require('inherits');
+const inherits = require('inherits')
-function MyWritable(fn, options) {
- stream.Writable.call(this, options);
- this.fn = fn;
-};
+const stream = require('../../lib/ours/index')
-inherits(MyWritable, stream.Writable);
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
-MyWritable.prototype._write = function(chunk, encoding, callback) {
- this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
- callback();
-};
+inherits(MyWritable, stream.Writable)
-function defaultCondingIsUtf8(t) {
- t.plan(1);
- var m = new MyWritable(function(isBuffer, type, enc) {
- t.equal(enc, 'utf8');
- }, { decodeStrings: false });
- m.write('foo');
- m.end();
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
}
-function changeDefaultEncodingToAscii(t) {
- t.plan(1);
- var m = new MyWritable(function(isBuffer, type, enc) {
- t.equal(enc, 'ascii');
- }, { decodeStrings: false });
- m.setDefaultEncoding('ascii');
- m.write('bar');
- m.end();
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
}
-function changeDefaultEncodingToInvalidValue(t) {
- t.plan(1);
- t.throws(function () {
- var m = new MyWritable(function(isBuffer, type, enc) {
- }, { decodeStrings: false });
- m.setDefaultEncoding({});
- m.write('bar');
- m.end();
- }, TypeError);
-}
-function checkVairableCaseEncoding(t) {
- t.plan(1);
- var m = new MyWritable(function(isBuffer, type, enc) {
- t.equal(enc, 'ascii');
- }, { decodeStrings: false });
- m.setDefaultEncoding('AsCii');
- m.write('bar');
- m.end();
-}
-module.exports = function (t) {
- t.test('writable change default encoding', function (t) {
- t.test('defaultCondingIsUtf8', defaultCondingIsUtf8);
- t.test('changeDefaultEncodingToAscii', changeDefaultEncodingToAscii);
- t.test('changeDefaultEncodingToInvalidValue', changeDefaultEncodingToInvalidValue);
- t.test('checkVairableCaseEncoding', checkVairableCaseEncoding);
- });
+module.exports = function (test) {
+ test('defaultCondingIsUtf8', (t) => {
+ t.plan(1)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'utf8')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('foo')
+ m.end()
+ })
+ test('changeDefaultEncodingToAscii', (t) => {
+ t.plan(1)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('ascii')
+ m.write('bar')
+ m.end()
+ })
+ test('changeDefaultEncodingToInvalidValue', (t) => {
+ t.plan(1)
+ t.throws(function () {
+ const m = new MyWritable(function (isBuffer, type, enc) {}, {
+ decodeStrings: false
+ })
+ m.setDefaultEncoding({})
+ m.write('bar')
+ m.end()
+ }, TypeError)
+ })
+ test('checkVairableCaseEncoding', (t) => {
+ t.plan(1)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('AsCii')
+ m.write('bar')
+ m.end()
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-change-default-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js
index e39bdbfaa6..d8beab46b4 100644
--- a/test/browser/test-stream-writable-constructor-set-methods.js
+++ b/test/browser/test-stream-writable-constructor-set-methods.js
@@ -1,40 +1,43 @@
-'use strict';
-var common = require('../common');
-var Writable = require('../../').Writable;
+'use strict'
+
+const { Writable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
- t.test('writable constructor set methods', function (t){
-
-
- var _writeCalled = false;
- function _write(d, e, n) {
- _writeCalled = true;
- }
-
- var w = new Writable({ write: _write });
- w.end(Buffer.from('blerg'));
-
- var _writevCalled = false;
- var dLength = 0;
- function _writev(d, n) {
- dLength = d.length;
- _writevCalled = true;
- }
-
- var w2 = new Writable({ writev: _writev });
- w2.cork();
-
- w2.write(Buffer.from('blerg'));
- w2.write(Buffer.from('blerg'));
- w2.end();
-
- setImmediate(function() {
- t.equal(w._write, _write);
- t.ok(_writeCalled);
- t.equal(w2._writev, _writev);
- t.equal(dLength, 2);
- t.ok(_writevCalled);
- t.end();
- });
- });
+ t.plan(5)
+ let _writeCalled = false
+
+ function _write(d, e, n) {
+ _writeCalled = true
+ }
+
+ const w = new Writable({
+ write: _write
+ })
+ w.end(Buffer.from('blerg'))
+ let _writevCalled = false
+ let dLength = 0
+
+ function _writev(d, n) {
+ dLength = d.length
+ _writevCalled = true
+ }
+
+ const w2 = new Writable({
+ writev: _writev
+ })
+ w2.cork()
+ w2.write(Buffer.from('blerg'))
+ w2.write(Buffer.from('blerg'))
+ w2.end()
+ setImmediate(function () {
+ t.equal(w._write, _write)
+ t.ok(_writeCalled)
+ t.equal(w2._writev, _writev)
+ t.equal(dLength, 2)
+ t.ok(_writevCalled)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-constructor-set-methods'
diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js
index f32dd7ef63..521b71fe6c 100644
--- a/test/browser/test-stream-writable-decoded-encoding.js
+++ b/test/browser/test-stream-writable-decoded-encoding.js
@@ -1,45 +1,55 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var stream = require('../../');
-var inherits = require('inherits');
+const inherits = require('inherits')
+
+const stream = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
function MyWritable(fn, options) {
- stream.Writable.call(this, options);
- this.fn = fn;
-};
-
-inherits(MyWritable, stream.Writable);
-
-MyWritable.prototype._write = function(chunk, encoding, callback) {
- this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
- callback();
-};
-
-function decodeStringsTrue(t) {
- t.plan(3);
- var m = new MyWritable(function(isBuffer, type, enc) {
- t.ok(isBuffer);
- t.equal(type, 'object');
- t.equal(enc, 'buffer');
- //console.log('ok - decoded string is decoded');
- }, { decodeStrings: true });
- m.write('some-text', 'utf8');
- m.end();
+ stream.Writable.call(this, options)
+ this.fn = fn
}
-function decodeStringsFalse(t) {
- t.plan(3);
- var m = new MyWritable(function(isBuffer, type, enc) {
- t.notOk(isBuffer);
- t.equal(type, 'string');
- t.equal(enc, 'utf8');
- //console.log('ok - un-decoded string is not decoded');
- }, { decodeStrings: false });
- m.write('some-text', 'utf8');
- m.end();
+inherits(MyWritable, stream.Writable)
+
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
}
-module.exports = function (t) {
- t.test('decodeStringsTrue', decodeStringsTrue);
- t.test('decodeStringsFalse', decodeStringsFalse);
+
+module.exports = function (test) {
+ test('decodeStringsTrue', (t) => {
+ t.plan(3)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.ok(isBuffer)
+ t.equal(type, 'object')
+ t.equal(enc, 'buffer') // console.log('ok - decoded string is decoded');
+ },
+ {
+ decodeStrings: true
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+ test('decodeStringsFalse', (t) => {
+ t.plan(3)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.notOk(isBuffer)
+ t.equal(type, 'string')
+ t.equal(enc, 'utf8') // console.log('ok - un-decoded string is not decoded');
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-decoded-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js
index b5e8b61383..f8966ee1d9 100644
--- a/test/browser/test-stream-writev.js
+++ b/test/browser/test-stream-writev.js
@@ -1,105 +1,140 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var queue = [];
-for (var decode = 0; decode < 2; decode++) {
- for (var uncork = 0; uncork < 2; uncork++) {
- for (var multi = 0; multi < 2; multi++) {
- queue.push([!!decode, !!uncork, !!multi]);
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+const queue = []
+
+for (let decode = 0; decode < 2; decode++) {
+ for (let uncork = 0; uncork < 2; uncork++) {
+ for (let multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi])
}
}
}
-module.exports = function (t) {
- t.test('writev', function (t) {
- queue.forEach(function (tr, i){
- t.test('round ' + i, test(tr[0], tr[1], tr[2]));
- });
- });
-}
-
-function test(decode, uncork, multi) {
+function runTest(decode, uncork, multi) {
return function (t) {
- //console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
- var counter = 0;
- var expectCount = 0;
+ t.plan(8) // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
+
+ let counter = 0
+ let expectCount = 0
+
function cnt(msg) {
- expectCount++;
- var expect = expectCount;
- var called = false;
- return function(er) {
- if (er)
- throw er;
- called = true;
- counter++;
- t.equal(counter, expect);
- };
+ expectCount++
+ const expect = expectCount
+ return function (er) {
+ if (er) {
+ throw er
+ }
+
+ counter++
+ t.equal(counter, expect)
+ }
+ }
+
+ const w = new stream.Writable({
+ decodeStrings: decode
+ })
+
+ w._write = function (chunk, e, cb) {
+ t.ok(false, 'Should not call _write')
}
- var w = new stream.Writable({ decodeStrings: decode });
- w._write = function(chunk, e, cb) {
- t.ok(false, 'Should not call _write');
- };
-
- var expectChunks = decode ?
- [
- { encoding: 'buffer',
- chunk: [104, 101, 108, 108, 111, 44, 32] },
- { encoding: 'buffer',
- chunk: [119, 111, 114, 108, 100] },
- { encoding: 'buffer',
- chunk: [33] },
- { encoding: 'buffer',
- chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] },
- { encoding: 'buffer',
- chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]}
- ] : [
- { encoding: 'ascii', chunk: 'hello, ' },
- { encoding: 'utf8', chunk: 'world' },
- { encoding: 'buffer', chunk: [33] },
- { encoding: 'binary', chunk: '\nand then...' },
- { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }
- ];
-
- var actualChunks;
- w._writev = function(chunks, cb) {
- actualChunks = chunks.map(function(chunk) {
+ const expectChunks = decode
+ ? [
+ {
+ encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]
+ }
+ ]
+ : [
+ {
+ encoding: 'ascii',
+ chunk: 'hello, '
+ },
+ {
+ encoding: 'utf8',
+ chunk: 'world'
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'binary',
+ chunk: '\nand then...'
+ },
+ {
+ encoding: 'hex',
+ chunk: 'facebea7deadbeefdecafbad'
+ }
+ ]
+ let actualChunks
+
+ w._writev = function (chunks, cb) {
+ actualChunks = chunks.map(function (chunk) {
return {
encoding: chunk.encoding,
- chunk: Buffer.isBuffer(chunk.chunk) ?
- Array.prototype.slice.call(chunk.chunk) : chunk.chunk
- };
- });
- cb();
- };
-
- w.cork();
- w.write('hello, ', 'ascii', cnt('hello'));
- w.write('world', 'utf8', cnt('world'));
+ chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ }
+ })
+ cb()
+ }
- if (multi)
- w.cork();
+ w.cork()
+ w.write('hello, ', 'ascii', cnt('hello'))
+ w.write('world', 'utf8', cnt('world'))
- w.write(Buffer.from('!'), 'buffer', cnt('!'));
- w.write('\nand then...', 'binary', cnt('and then'));
+ if (multi) {
+ w.cork()
+ }
- if (multi)
- w.uncork();
+ w.write(Buffer.from('!'), 'buffer', cnt('!'))
+ w.write('\nand then...', 'binary', cnt('and then'))
- w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'));
+ if (multi) {
+ w.uncork()
+ }
- if (uncork)
- w.uncork();
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'))
- w.end(cnt('end'));
+ if (uncork) {
+ w.uncork()
+ }
- w.on('finish', function() {
+ w.end(cnt('end'))
+ w.on('finish', function () {
// make sure finish comes after all the write cb
- cnt('finish')();
- t.deepEqual(expectChunks, actualChunks);
- t.end();
- });
+ cnt('finish')()
+ t.deepEqual(expectChunks, actualChunks)
+ })
}
}
+
+module.exports = function (test) {
+ for (let i = 0; i < queue.length; i++) {
+ const tr = queue[i]
+ test('round ' + i, runTest(tr[0], tr[1], tr[2]))
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writev'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js
index 86b66f81d5..10f8378a9d 100644
--- a/test/browser/test-stream2-base64-single-char-read-end.js
+++ b/test/browser/test-stream2-base64-single-char-read-end.js
@@ -1,41 +1,41 @@
-'use strict';
-var common = require('../common');
-var R = require('../../lib/_stream_readable');
-var W = require('../../lib/_stream_writable');
-module.exports = function (t) {
- t.test('base64 single char read end', function (t) {
- t.plan(1);
- var src = new R({encoding: 'base64'});
- var dst = new W();
- var hasRead = false;
- var accum = [];
- var timeout;
+'use strict'
- src._read = function(n) {
- if(!hasRead) {
- hasRead = true;
- process.nextTick(function() {
- src.push(Buffer.from('1'));
- src.push(null);
- });
- };
- };
+const { Readable, Writable } = require('../../lib/ours/index')
- dst._write = function(chunk, enc, cb) {
- accum.push(chunk);
- cb();
- };
+const { kReadableStreamSuiteName } = require('./symbols')
- src.on('end', function() {
- t.equal(Buffer.concat(accum) + '', 'MQ==');
- clearTimeout(timeout);
- });
+module.exports = function (t) {
+ t.plan(1)
+ const src = new Readable({
+ encoding: 'base64'
+ })
+ const dst = new Writable()
+ let hasRead = false
+ const accum = []
- src.pipe(dst);
+ src._read = function (n) {
+ if (!hasRead) {
+ hasRead = true
+ process.nextTick(function () {
+ src.push(Buffer.from('1'))
+ src.push(null)
+ })
+ }
+ }
- timeout = setTimeout(function() {
- assert.fail('timed out waiting for _write');
- }, 100);
+ dst._write = function (chunk, enc, cb) {
+ accum.push(chunk)
+ cb()
+ }
-})
+ src.on('end', function () {
+ t.equal(Buffer.concat(accum) + '', 'MQ==')
+ clearTimeout(timeout)
+ })
+ src.pipe(dst)
+ const timeout = setTimeout(function () {
+ t.fail('timed out waiting for _write')
+ }, 100)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-base64-single-char-read-end'
diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js
index 954473b8b7..940f8d1b0e 100644
--- a/test/browser/test-stream2-compatibility.js
+++ b/test/browser/test-stream2-compatibility.js
@@ -1,33 +1,37 @@
-'use strict';
-var R = require('../../lib/_stream_readable');
-var inherits = require('inherits');
-var EE = require('events').EventEmitter;
+'use strict'
+
+const inherits = require('inherits')
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('compatibility', function (t) {
- t.plan(1);
+ t.plan(1)
+ let ondataCalled = 0
- var ondataCalled = 0;
+ function TestReader() {
+ Readable.apply(this)
+ this._buffer = Buffer.alloc(100)
- function TestReader() {
- R.apply(this);
- this._buffer = Buffer.alloc(100);
- this._buffer.fill('x');
+ this._buffer.fill('x')
- this.on('data', function() {
- ondataCalled++;
- });
- }
+ this.on('data', function () {
+ ondataCalled++
+ })
+ }
- inherits(TestReader, R);
+ inherits(TestReader, Readable)
- TestReader.prototype._read = function(n) {
- this.push(this._buffer);
- this._buffer = Buffer.alloc(0);
- };
+ TestReader.prototype._read = function (n) {
+ this.push(this._buffer)
+ this._buffer = Buffer.alloc(0)
+ }
- var reader = new TestReader();
- setTimeout(function() {
- t.equal(ondataCalled, 1);
- });
- });
+ setTimeout(function () {
+ t.equal(ondataCalled, 1)
+ })
+ new TestReader().read()
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-compatibility'
diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js
index 02539a0b91..ee7ee033f5 100644
--- a/test/browser/test-stream2-large-read-stall.js
+++ b/test/browser/test-stream2-large-read-stall.js
@@ -1,62 +1,58 @@
-'use strict';
-var common = require('../common');
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('large object read stall', function (t) {
-
-// If everything aligns so that you do a read(n) of exactly the
-// remaining buffer, then make sure that 'end' still emits.
-
- var READSIZE = 100;
- var PUSHSIZE = 20;
- var PUSHCOUNT = 1000;
- var HWM = 50;
-
- var Readable = require('../../').Readable;
- var r = new Readable({
- highWaterMark: HWM
- });
- var rs = r._readableState;
-
- r._read = push;
-
- r.on('readable', function() {
- ;false && console.error('>> readable');
- do {
- ;false && console.error(' > read(%d)', READSIZE);
- var ret = r.read(READSIZE);
- ;false && console.error(' < %j (%d remain)', ret && ret.length, rs.length);
- } while (ret && ret.length === READSIZE);
-
- ;false && console.error('<< after read()',
- ret && ret.length,
- rs.needReadable,
- rs.length);
- });
-
- var endEmitted = false;
- r.on('end', function() {
- t.equal(pushes, PUSHCOUNT + 1);
- t.end();
- ;false && console.error('end');
- });
-
- var pushes = 0;
- function push() {
- if (pushes > PUSHCOUNT)
- return;
-
- if (pushes++ === PUSHCOUNT) {
- ;false && console.error(' push(EOF)');
- return r.push(null);
- }
-
- ;false && console.error(' push #%d', pushes);
- if (r.push(Buffer.alloc(PUSHSIZE)))
- setTimeout(push);
+ t.plan(1) // If everything aligns so that you do a read(n) of exactly the
+ // remaining buffer, then make sure that 'end' still emits.
+
+ const READSIZE = 100
+ const PUSHSIZE = 20
+ const PUSHCOUNT = 1000
+ const HWM = 50
+ const r = new Readable({
+ highWaterMark: HWM
+ })
+ const rs = r._readableState
+ r._read = push
+ r.on('readable', function () {
+ false && console.error('>> readable')
+ let ret
+
+ do {
+ false && console.error(' > read(%d)', READSIZE)
+ ret = r.read(READSIZE)
+ false && console.error(' < %j (%d remain)', ret && ret.length, rs.length)
+ } while (ret && ret.length === READSIZE)
+
+ false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length)
+ })
+ r.on('end', function () {
+ t.equal(pushes, PUSHCOUNT + 1)
+ false && console.error('end')
+ })
+ let pushes = 0
+
+ function push() {
+ if (pushes > PUSHCOUNT) {
+ return
+ }
+
+ if (pushes++ === PUSHCOUNT) {
+ false && console.error(' push(EOF)')
+ return r.push(null)
}
- // start the flow
- var ret = r.read(0);
+ false && console.error(' push #%d', pushes)
+
+ if (r.push(Buffer.alloc(PUSHSIZE))) {
+ setTimeout(push)
+ }
+ } // start the flow
- });
+ r.read(0)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-large-read-stall'
diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js
index 26a038b599..679636a6ad 100644
--- a/test/browser/test-stream2-objects.js
+++ b/test/browser/test-stream2-objects.js
@@ -1,306 +1,344 @@
-'use strict';
-var common = require('../common');
-var Readable = require('../../lib/_stream_readable');
-var Writable = require('../../lib/_stream_writable');
+'use strict'
-module.exports = function (t) {
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
-
- function toArray(callback) {
- var stream = new Writable({ objectMode: true });
- var list = [];
- stream.write = function(chunk) {
- list.push(chunk);
- };
-
- stream.end = function() {
- callback(list);
- };
-
- return stream;
+function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
}
+}
- function fromArray(list) {
- var r = new Readable({ objectMode: true });
- r._read = noop;
- forEach(list, function(chunk) {
- r.push(chunk);
- });
- r.push(null);
+function toArray(callback) {
+ const stream = new Writable({
+ objectMode: true
+ })
+ const list = []
- return r;
+ stream.write = function (chunk) {
+ list.push(chunk)
}
- function noop() {}
-
- t.test('can read objects from stream', function(t) {
- var r = fromArray([{ one: '1'}, { two: '2' }]);
-
- var v1 = r.read();
- var v2 = r.read();
- var v3 = r.read();
-
- t.deepEqual(v1, { one: '1' });
- t.deepEqual(v2, { two: '2' });
- t.deepEqual(v3, null);
-
- t.end();
- });
-
- t.test('can pipe objects into stream', function(t) {
- var r = fromArray([{ one: '1'}, { two: '2' }]);
-
- r.pipe(toArray(function(list) {
- t.deepEqual(list, [
- { one: '1' },
- { two: '2' }
- ]);
-
- t.end();
- }));
- });
-
- t.test('read(n) is ignored', function(t) {
- var r = fromArray([{ one: '1'}, { two: '2' }]);
-
- var value = r.read(2);
-
- t.deepEqual(value, { one: '1' });
-
- t.end();
- });
-
- t.test('can read objects from _read (sync)', function(t) {
- var r = new Readable({ objectMode: true });
- var list = [{ one: '1'}, { two: '2' }];
- r._read = function(n) {
- var item = list.shift();
- r.push(item || null);
- };
-
- r.pipe(toArray(function(list) {
- t.deepEqual(list, [
- { one: '1' },
- { two: '2' }
- ]);
-
- t.end();
- }));
- });
-
- t.test('can read objects from _read (async)', function(t) {
- var r = new Readable({ objectMode: true });
- var list = [{ one: '1'}, { two: '2' }];
- r._read = function(n) {
- var item = list.shift();
- process.nextTick(function() {
- r.push(item || null);
- });
- };
-
- r.pipe(toArray(function(list) {
- t.deepEqual(list, [
- { one: '1' },
- { two: '2' }
- ]);
-
- t.end();
- }));
- });
+ stream.end = function () {
+ callback(list)
+ }
- t.test('can read strings as objects', function(t) {
- var r = new Readable({
+ return stream
+}
+
+function fromArray(list) {
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ forEach(list, function (chunk) {
+ r.push(chunk)
+ })
+ r.push(null)
+ return r
+}
+
+function noop() {}
+
+module.exports = function (test) {
+ test('can read objects from stream', function (t) {
+ t.plan(3)
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const v1 = r.read()
+ const v2 = r.read()
+ const v3 = r.read()
+ t.deepEqual(v1, {
+ one: '1'
+ })
+ t.deepEqual(v2, {
+ two: '2'
+ })
+ t.deepEqual(v3, null)
+ })
+ test('can pipe objects into stream', function (t) {
+ t.plan(1)
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ })
+ test('read(n) is ignored', function (t) {
+ t.plan(1)
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const value = r.read(2)
+ t.deepEqual(value, {
+ one: '1'
+ })
+ })
+ test('can read objects from _read (sync)', function (t) {
+ t.plan(1)
+ const r = new Readable({
objectMode: true
- });
- r._read = noop;
- var list = ['one', 'two', 'three'];
- forEach(list, function(str) {
- r.push(str);
- });
- r.push(null);
-
- r.pipe(toArray(function(array) {
- t.deepEqual(array, list);
-
- t.end();
- }));
- });
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+
+ r._read = function (n) {
+ const item = list.shift()
+ r.push(item || null)
+ }
- t.test('read(0) for object streams', function(t) {
- var r = new Readable({
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ })
+ test('can read objects from _read (async)', function (t) {
+ t.plan(1)
+ const r = new Readable({
objectMode: true
- });
- r._read = noop;
-
- r.push('foobar');
- r.push(null);
-
- var v = r.read(0);
-
- r.pipe(toArray(function(array) {
- t.deepEqual(array, ['foobar']);
-
- t.end();
- }));
- });
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+
+ r._read = function (n) {
+ const item = list.shift()
+ process.nextTick(function () {
+ r.push(item || null)
+ })
+ }
- t.test('falsey values', function(t) {
- var r = new Readable({
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ })
+ test('can read strings as objects', function (t) {
+ t.plan(1)
+ const r = new Readable({
objectMode: true
- });
- r._read = noop;
-
- r.push(false);
- r.push(0);
- r.push('');
- r.push(null);
-
- r.pipe(toArray(function(array) {
- t.deepEqual(array, [false, 0, '']);
-
- t.end();
- }));
- });
-
- t.test('high watermark _read', function(t) {
- var r = new Readable({
+ })
+ r._read = noop
+ const list = ['one', 'two', 'three']
+ forEach(list, function (str) {
+ r.push(str)
+ })
+ r.push(null)
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, list)
+ })
+ )
+ })
+ test('read(0) for object streams', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ r.push('foobar')
+ r.push(null)
+ r.read(0)
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, ['foobar'])
+ })
+ )
+ })
+ test('falsey values', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ r.push(false)
+ r.push(0)
+ r.push('')
+ r.push(null)
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, [false, 0, ''])
+ })
+ )
+ })
+ test('high watermark _read', function (t) {
+ t.plan(5)
+ const r = new Readable({
highWaterMark: 6,
objectMode: true
- });
- var calls = 0;
- var list = ['1', '2', '3', '4', '5', '6', '7', '8'];
-
- r._read = function(n) {
- calls++;
- };
-
- forEach(list, function(c) {
- r.push(c);
- });
-
- var v = r.read();
-
- t.equal(calls, 0);
- t.equal(v, '1');
-
- var v2 = r.read();
- t.equal(v2, '2');
-
- var v3 = r.read();
- t.equal(v3, '3');
+ })
+ let calls = 0
+ const list = ['1', '2', '3', '4', '5', '6', '7', '8']
- t.equal(calls, 1);
-
- t.end();
- });
+ r._read = function (n) {
+ calls++
+ }
- t.test('high watermark push', function(t) {
- var r = new Readable({
+ forEach(list, function (c) {
+ r.push(c)
+ })
+ const v = r.read()
+ t.equal(calls, 0)
+ t.equal(v, '1')
+ const v2 = r.read()
+ t.equal(v2, '2')
+ const v3 = r.read()
+ t.equal(v3, '3')
+ t.equal(calls, 1)
+ })
+ test('high watermark push', function (t) {
+ t.plan(6)
+ const r = new Readable({
highWaterMark: 6,
objectMode: true
- });
- r._read = function(n) {};
- for (var i = 0; i < 6; i++) {
- var bool = r.push(i);
- t.equal(bool, i === 5 ? false : true);
- }
-
- t.end();
- });
-
- t.test('can write objects to stream', function(t) {
- var w = new Writable({ objectMode: true });
-
- w._write = function(chunk, encoding, cb) {
- t.deepEqual(chunk, { foo: 'bar' });
- cb();
- };
-
- w.on('finish', function() {
- t.end();
- });
-
- w.write({ foo: 'bar' });
- w.end();
- });
+ })
- t.test('can write multiple objects to stream', function(t) {
- var w = new Writable({ objectMode: true });
- var list = [];
+ r._read = function (n) {}
- w._write = function(chunk, encoding, cb) {
- list.push(chunk);
- cb();
- };
-
- w.on('finish', function() {
- t.deepEqual(list, [0, 1, 2, 3, 4]);
-
- t.end();
- });
-
- w.write(0);
- w.write(1);
- w.write(2);
- w.write(3);
- w.write(4);
- w.end();
- });
-
- t.test('can write strings as objects', function(t) {
- var w = new Writable({
+ for (let i = 0; i < 6; i++) {
+ const bool = r.push(i)
+ t.equal(bool, i !== 5)
+ }
+ })
+ test('can write objects to stream', function (t) {
+ t.plan(1)
+ const w = new Writable({
objectMode: true
- });
- var list = [];
-
- w._write = function(chunk, encoding, cb) {
- list.push(chunk);
- process.nextTick(cb);
- };
-
- w.on('finish', function() {
- t.deepEqual(list, ['0', '1', '2', '3', '4']);
-
- t.end();
- });
+ })
- w.write('0');
- w.write('1');
- w.write('2');
- w.write('3');
- w.write('4');
- w.end();
- });
+ w._write = function (chunk, encoding, cb) {
+ t.deepEqual(chunk, {
+ foo: 'bar'
+ })
+ cb()
+ }
- t.test('buffers finish until cb is called', function(t) {
- var w = new Writable({
+ w.on('finish', function () {})
+ w.write({
+ foo: 'bar'
+ })
+ w.end()
+ })
+ test('can write multiple objects to stream', function (t) {
+ t.plan(1)
+ const w = new Writable({
objectMode: true
- });
- var called = false;
+ })
+ const list = []
- w._write = function(chunk, encoding, cb) {
- t.equal(chunk, 'foo');
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ cb()
+ }
- process.nextTick(function() {
- called = true;
- cb();
- });
- };
+ w.on('finish', function () {
+ t.deepEqual(list, [0, 1, 2, 3, 4])
+ })
+ w.write(0)
+ w.write(1)
+ w.write(2)
+ w.write(3)
+ w.write(4)
+ w.end()
+ })
+ test('can write strings as objects', function (t) {
+ t.plan(1)
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
- w.on('finish', function() {
- t.equal(called, true);
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ process.nextTick(cb)
+ }
- t.end();
- });
+ w.on('finish', function () {
+ t.deepEqual(list, ['0', '1', '2', '3', '4'])
+ })
+ w.write('0')
+ w.write('1')
+ w.write('2')
+ w.write('3')
+ w.write('4')
+ w.end()
+ })
+ test('buffers finish until cb is called', function (t) {
+ t.plan(2)
+ const w = new Writable({
+ objectMode: true
+ })
+ let called = false
+
+ w._write = function (chunk, encoding, cb) {
+ t.equal(chunk, 'foo')
+ process.nextTick(function () {
+ called = true
+ cb()
+ })
+ }
- w.write('foo');
- w.end();
- });
+ w.on('finish', function () {
+ t.equal(called, true)
+ })
+ w.write('foo')
+ w.end()
+ })
+}
- function forEach (xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
- }
-};
+module.exports[kReadableStreamSuiteName] = 'stream2-objects'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js
index ba212f7dcb..2dd3e31821 100644
--- a/test/browser/test-stream2-pipe-error-handling.js
+++ b/test/browser/test-stream2-pipe-error-handling.js
@@ -1,88 +1,91 @@
-'use strict';
-var common = require('../common');
-var assert = require('assert');
-var stream = require('../../');
-module.exports = function (t) {
- t.test('Error Listener Catches', function (t) {
- var count = 1000;
-
- var source = new stream.Readable();
- source._read = function(n) {
- n = Math.min(count, n);
- count -= n;
- source.push(Buffer.alloc(n));
- };
-
- var unpipedDest;
- source.unpipe = function(dest) {
- unpipedDest = dest;
- stream.Readable.prototype.unpipe.call(this, dest);
- };
-
- var dest = new stream.Writable();
- dest._write = function(chunk, encoding, cb) {
- cb();
- };
-
- source.pipe(dest);
-
- var gotErr = null;
- dest.on('error', function(err) {
- gotErr = err;
- });
-
- var unpipedSource;
- dest.on('unpipe', function(src) {
- unpipedSource = src;
- });
-
- var err = new Error('This stream turned into bacon.');
- dest.emit('error', err);
- t.strictEqual(gotErr, err);
- t.strictEqual(unpipedSource, source);
- t.strictEqual(unpipedDest, dest);
- t.end();
- });
-
- t.test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) {
- var count = 1000;
-
- var source = new stream.Readable();
- source._read = function(n) {
- n = Math.min(count, n);
- count -= n;
- source.push(Buffer.alloc(n));
- };
-
- var unpipedDest;
- source.unpipe = function(dest) {
- unpipedDest = dest;
- stream.Readable.prototype.unpipe.call(this, dest);
- };
-
- var dest = new stream.Writable();
- dest._write = function(chunk, encoding, cb) {
- cb();
- };
-
- source.pipe(dest);
-
- var unpipedSource;
- dest.on('unpipe', function(src) {
- unpipedSource = src;
- });
-
- var err = new Error('This stream turned into bacon.');
-
- var gotErr = null;
- try {
- dest.emit('error', err);
- } catch (e) {
- gotErr = e;
+'use strict'
+
+const stream = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('Error Listener Catches', function (t) {
+ t.plan(3)
+ let count = 1000
+ const source = new stream.Readable()
+
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.alloc(n))
+ }
+
+ let unpipedDest
+
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+
+ const dest = new stream.Writable()
+
+ dest._write = function (chunk, encoding, cb) {
+ cb()
}
- t.strictEqual(gotErr, err);
- t.strictEqual(unpipedSource, source);
- t.strictEqual(unpipedDest, dest);
- t.end();
- });
+
+ source.pipe(dest)
+ let gotErr = null
+ dest.on('error', function (err) {
+ gotErr = err
+ })
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+ const err = new Error('This stream turned into bacon.')
+ dest.emit('error', err)
+ t.strictEqual(gotErr, err)
+ t.strictEqual(unpipedSource, source)
+ t.strictEqual(unpipedDest, dest)
+ })
+ test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) {
+ t.plan(3)
+ let count = 1000
+ const source = new stream.Readable()
+
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.alloc(n))
+ }
+
+ let unpipedDest
+
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+
+ const dest = new stream.Writable()
+
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+
+ source.pipe(dest)
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+ const err = new Error('This stream turned into bacon.')
+ const onerror = global.onerror
+ dest.emit('error', err)
+
+ global.onerror = () => {
+ t.ok(true)
+ t.strictEqual(unpipedSource, source)
+ t.strictEqual(unpipedDest, dest)
+ global.onerror = onerror
+ return true
+ }
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js
index 5f4a4e2686..c5696778a5 100644
--- a/test/browser/test-stream2-pipe-error-once-listener.js
+++ b/test/browser/test-stream2-pipe-error-once-listener.js
@@ -1,41 +1,43 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var inherits = require('inherits');
-var stream = require('../../');
+const inherits = require('inherits')
+
+const stream = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
- t.test('pipe error once listener', function (t){
- t.plan(1);
- var Read = function() {
- stream.Readable.call(this);
- };
- inherits(Read, stream.Readable);
-
- Read.prototype._read = function(size) {
- this.push('x');
- this.push(null);
- };
-
-
- var Write = function() {
- stream.Writable.call(this);
- };
- inherits(Write, stream.Writable);
-
- Write.prototype._write = function(buffer, encoding, cb) {
- this.emit('error', new Error('boom'));
- this.emit('alldone');
- };
-
- var read = new Read();
- var write = new Write();
-
- write.once('error', function(err) {});
- write.once('alldone', function(err) {
- t.ok(true);
- });
-
- read.pipe(write);
- });
+ t.plan(1)
+
+ const Read = function () {
+ stream.Readable.call(this)
+ }
+
+ inherits(Read, stream.Readable)
+
+ Read.prototype._read = function (size) {
+ this.push('x')
+ this.push(null)
+ }
+
+ const Write = function () {
+ stream.Writable.call(this)
+ }
+
+ inherits(Write, stream.Writable)
+
+ Write.prototype._write = function (buffer, encoding, cb) {
+ this.emit('error', new Error('boom'))
+ this.emit('alldone')
+ }
+
+ const read = new Read()
+ const write = new Write()
+ write.once('error', () => {})
+ write.once('alldone', function () {
+ t.ok(true)
+ })
+ read.pipe(write)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-once-listener'
diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js
index 7ca5f39ef3..0c3307f516 100644
--- a/test/browser/test-stream2-push.js
+++ b/test/browser/test-stream2-push.js
@@ -1,120 +1,114 @@
-'use strict';
-var common = require('../common');
-var stream = require('../../');
-var Readable = stream.Readable;
-var Writable = stream.Writable;
+'use strict'
+const { EventEmitter: EE } = require('events')
-var inherits = require('inherits');
-var EE = require('events').EventEmitter;
-module.exports = function (t) {
-
-// a mock thing a bit like the net.Socket/tcp_wrap.handle interaction
- t.test('push', function (t) {
- var stream = new Readable({
- highWaterMark: 16,
- encoding: 'utf8'
- });
-
- var source = new EE();
-
- stream._read = function() {
- //console.error('stream._read');
- readStart();
- };
-
- var ended = false;
- stream.on('end', function() {
- ended = true;
- });
-
- source.on('data', function(chunk) {
- var ret = stream.push(chunk);
- //console.error('data', stream._readableState.length);
- if (!ret)
- readStop();
- });
-
- source.on('end', function() {
- stream.push(null);
- });
-
- var reading = false;
-
- function readStart() {
- //console.error('readStart');
- reading = true;
- }
-
- function readStop() {
- //console.error('readStop');
- reading = false;
- process.nextTick(function() {
- var r = stream.read();
- if (r !== null)
- writer.write(r);
- });
- }
+const { Readable, Writable } = require('../../lib/ours/index')
- var writer = new Writable({
- decodeStrings: false
- });
-
- var written = [];
-
- var expectWritten =
- [ 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg' ];
-
- writer._write = function(chunk, encoding, cb) {
- //console.error('WRITE %s', chunk);
- written.push(chunk);
- process.nextTick(cb);
- };
-
- writer.on('finish', finish);
-
-
- // now emit some chunks.
-
- var chunk = 'asdfg';
-
- var set = 0;
- readStart();
- data();
- function data() {
- t.ok(reading);
- source.emit('data', chunk);
- t.ok(reading);
- source.emit('data', chunk);
- t.ok(reading);
- source.emit('data', chunk);
- t.ok(reading);
- source.emit('data', chunk);
- t.notOk(reading);
- if (set++ < 5)
- setTimeout(data, 10);
- else
- end();
- }
+const { kReadableStreamSuiteName } = require('./symbols')
- function finish() {
- //console.error('finish');
- t.deepEqual(written, expectWritten);
- t.end();
+module.exports = function (t) {
+ t.plan(33)
+ const stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+ })
+ const source = new EE()
+
+ stream._read = function () {
+ // console.error('stream._read');
+ readStart()
+ }
+
+ let ended = false
+ stream.on('end', function () {
+ ended = true
+ })
+ source.on('data', function (chunk) {
+ const ret = stream.push(chunk) // console.error('data', stream._readableState.length);
+
+ if (!ret) {
+ readStop()
}
-
- function end() {
- source.emit('end');
- t.notOk(reading);
- writer.end(stream.read());
- setTimeout(function() {
- t.ok(ended);
- });
+ })
+ source.on('end', function () {
+ stream.push(null)
+ })
+ let reading = false
+
+ function readStart() {
+ // console.error('readStart');
+ reading = true
+ }
+
+ function readStop() {
+ // console.error('readStop');
+ reading = false
+ process.nextTick(function () {
+ const r = stream.read()
+
+ if (r !== null) {
+ writer.write(r)
+ }
+ })
+ }
+
+ const writer = new Writable({
+ decodeStrings: false
+ })
+ const written = []
+ const expectWritten = [
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg'
+ ]
+
+ writer._write = function (chunk, encoding, cb) {
+ // console.error('WRITE %s', chunk);
+ written.push(chunk)
+ process.nextTick(cb)
+ }
+
+ writer.on('finish', finish) // now emit some chunks.
+
+ const chunk = 'asdfg'
+ let set = 0
+ readStart()
+ data()
+
+ function data() {
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.notOk(reading)
+
+ if (set++ < 5) {
+ setTimeout(data, 10)
+ } else {
+ end()
}
- });
-};
+ }
+
+ function finish() {
+ // console.error('finish');
+ t.deepEqual(written, expectWritten)
+ }
+
+ function end() {
+ source.emit('end')
+ t.notOk(reading)
+ writer.end(stream.read())
+ setTimeout(function () {
+ t.ok(ended)
+ })
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-push'
diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js
index fd86d67bc5..1a7b611b36 100644
--- a/test/browser/test-stream2-readable-empty-buffer-no-eof.js
+++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js
@@ -1,14 +1,13 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var Readable = require('../../').Readable;
+const { Readable } = require('../../lib/ours/index')
-module.exports = function (t) {
- t.test('readable empty buffer no eof 1', function (t) {
- t.plan(1);
- var r = new Readable();
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
- // should not end when we get a Buffer(0) or '' as the _read result
+module.exports = function (test) {
+ test('readable empty buffer no eof 1', function (t) {
+ t.plan(1)
+ const r = new Readable() // should not end when we get a Buffer(0) or '' as the _read result
// that just means that there is *temporarily* no data, but to go
// ahead and try again later.
//
@@ -18,74 +17,96 @@ module.exports = function (t) {
// r.read(0) again later, otherwise there is no more work being done
// and the process just exits.
- var buf = Buffer.alloc(5);
- buf.fill('x');
- var reads = 5;
- r._read = function(n) {
+ const buf = Buffer.alloc(5)
+ buf.fill('x')
+ let reads = 5
+
+ r._read = function (n) {
switch (reads--) {
case 0:
- return r.push(null); // EOF
+ return r.push(null)
+ // EOF
+
case 1:
- return r.push(buf);
+ return r.push(buf)
+
case 2:
- setTimeout(r.read.bind(r, 0), 50);
- return r.push(Buffer.alloc(0)); // Not-EOF!
+ setTimeout(r.read.bind(r, 0), 50)
+ return r.push(Buffer.alloc(0))
+ // Not-EOF!
+
case 3:
- setTimeout(r.read.bind(r, 0), 50);
- return process.nextTick(function() {
- return r.push(Buffer.alloc(0));
- });
+ setTimeout(r.read.bind(r, 0), 50)
+ return process.nextTick(function () {
+ return r.push(Buffer.alloc(0))
+ })
+
case 4:
- setTimeout(r.read.bind(r, 0), 50);
- return setTimeout(function() {
- return r.push(Buffer.alloc(0));
- });
+ setTimeout(r.read.bind(r, 0), 50)
+ return setTimeout(function () {
+ return r.push(Buffer.alloc(0))
+ })
+
case 5:
- return setTimeout(function() {
- return r.push(buf);
- });
+ return setTimeout(function () {
+ return r.push(buf)
+ })
+
default:
- throw new Error('unreachable');
+ throw new Error('unreachable')
}
- };
+ }
+
+ const results = []
- var results = [];
function flow() {
- var chunk;
- while (null !== (chunk = r.read()))
- results.push(chunk + '');
+ let chunk
+
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
}
- r.on('readable', flow);
- r.on('end', function() {
- results.push('EOF');
- t.deepEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]);
- });
- flow();
-
- });
-
- t.test('readable empty buffer no eof 2', function (t) {
- t.plan(1);
- var r = new Readable({ encoding: 'base64' });
- var reads = 5;
- r._read = function(n) {
- if (!reads--)
- return r.push(null); // EOF
- else
- return r.push(Buffer.from('x'));
- };
-
- var results = [];
+
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF'])
+ })
+ flow()
+ })
+ test('readable empty buffer no eof 2', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ encoding: 'base64'
+ })
+ let reads = 5
+
+ r._read = function (n) {
+ if (!reads--) {
+ return r.push(null) // EOF
+ } else {
+ return r.push(Buffer.from('x'))
+ }
+ }
+
+ const results = []
+
function flow() {
- var chunk;
- while (null !== (chunk = r.read()))
- results.push(chunk + '');
+ let chunk
+
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
}
- r.on('readable', flow);
- r.on('end', function() {
- results.push('EOF');
- t.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]);
- });
- flow();
- });
+
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['eHh4', 'eHg=', 'EOF'])
+ })
+ flow()
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-empty-buffer-no-eof'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js
index 7522b05692..92c607d86b 100644
--- a/test/browser/test-stream2-readable-from-list.js
+++ b/test/browser/test-stream2-readable-from-list.js
@@ -1,68 +1,89 @@
-// Flags: --expose_internals
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-require('../common');
-var fromList = require('../../lib/_stream_readable')._fromList;
-var BufferList = require('../../lib/internal/streams/BufferList');
+'use strict'
-function bufferListFromArray(arr) {
- var bl = new BufferList();
- for (var i = 0; i < arr.length; ++i) {
- bl.push(arr[i]);
- }return bl;
-}
+const { _fromList: fromList } = require('../../lib/_stream_readable')
-module.exports = function (t) {
- t.test('buffers', function (t) {
- var list = [bufferShim.from('foog'), bufferShim.from('bark'), bufferShim.from('bazy'), bufferShim.from('kuel')];
- list = bufferListFromArray(list);
+const BufferList = require('../../lib/internal/streams/buffer_list')
- // read more than the first element.
- var ret = fromList(6, { buffer: list, length: 16 });
- t.equal(ret.toString(), 'foogba');
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
- // read exactly the first element.
- ret = fromList(2, { buffer: list, length: 10 });
- t.equal(ret.toString(), 'rk');
+function bufferListFromArray(arr) {
+ const bl = new BufferList()
- // read less than the first element.
- ret = fromList(2, { buffer: list, length: 8 });
- t.equal(ret.toString(), 'ba');
+ for (let i = 0; i < arr.length; ++i) {
+ bl.push(arr[i])
+ }
- // read more than we have.
- ret = fromList(100, { buffer: list, length: 6 });
- t.equal(ret.toString(), 'zykuel');
+ return bl
+}
- // all consumed.
- t.same(list, new BufferList());
+module.exports = function (test) {
+ test('buffers', function (t) {
+ t.plan(5)
+ let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')]
+ list = bufferListFromArray(list) // read more than the first element.
- t.end();
- });
+ let ret = fromList(6, {
+ buffer: list,
+ length: 16
+ })
+ t.equal(ret.toString(), 'foogba') // read exactly the first element.
- t.test('strings', function (t) {
- var list = ['foog', 'bark', 'bazy', 'kuel'];
- list = bufferListFromArray(list);
+ ret = fromList(2, {
+ buffer: list,
+ length: 10
+ })
+ t.equal(ret.toString(), 'rk') // read less than the first element.
- // read more than the first element.
- var ret = fromList(6, { buffer: list, length: 16, decoder: true });
- t.equal(ret, 'foogba');
+ ret = fromList(2, {
+ buffer: list,
+ length: 8
+ })
+ t.equal(ret.toString(), 'ba') // read more than we have.
- // read exactly the first element.
- ret = fromList(2, { buffer: list, length: 10, decoder: true });
- t.equal(ret, 'rk');
+ ret = fromList(100, {
+ buffer: list,
+ length: 6
+ })
+ t.equal(ret.toString(), 'zykuel') // all consumed.
- // read less than the first element.
- ret = fromList(2, { buffer: list, length: 8, decoder: true });
- t.equal(ret, 'ba');
+ t.same(list, new BufferList())
+ })
+ test('strings', function (t) {
+ t.plan(5)
+ let list = ['foog', 'bark', 'bazy', 'kuel']
+ list = bufferListFromArray(list) // read more than the first element.
- // read more than we have.
- ret = fromList(100, { buffer: list, length: 6, decoder: true });
- t.equal(ret, 'zykuel');
+ let ret = fromList(6, {
+ buffer: list,
+ length: 16,
+ decoder: true
+ })
+ t.equal(ret, 'foogba') // read exactly the first element.
- // all consumed.
- t.same(list, new BufferList());
+ ret = fromList(2, {
+ buffer: list,
+ length: 10,
+ decoder: true
+ })
+ t.equal(ret, 'rk') // read less than the first element.
- t.end();
- });
+ ret = fromList(2, {
+ buffer: list,
+ length: 8,
+ decoder: true
+ })
+ t.equal(ret, 'ba') // read more than we have.
+
+ ret = fromList(100, {
+ buffer: list,
+ length: 6,
+ decoder: true
+ })
+ t.equal(ret, 'zykuel') // all consumed.
+
+ t.same(list, new BufferList())
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-from-list'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js
index 30fd350f97..4bff8d7afc 100644
--- a/test/browser/test-stream2-readable-legacy-drain.js
+++ b/test/browser/test-stream2-readable-legacy-drain.js
@@ -1,52 +1,49 @@
-'use strict';
-var common = require('../common');
+'use strict'
+
+const { Stream, Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
-var Stream = require('../../');
-var Readable = require('../../').Readable;
module.exports = function (t) {
- t.test('readable legacy drain', function (t) {
- var r = new Readable();
- var N = 256;
- var reads = 0;
- r._read = function(n) {
- return r.push(++reads === N ? null : Buffer.alloc(1));
- };
- t.plan(2);
- r.on('end', function() {
- t.ok(true, 'rended');
- });
-
- var w = new Stream();
- w.writable = true;
- var writes = 0;
- var buffered = 0;
- w.write = function(c) {
- writes += c.length;
- buffered += c.length;
- process.nextTick(drain);
- return false;
- };
-
- function drain() {
- if(buffered > 3) {
- t.ok(false, 'to much buffer');
- }
- buffered = 0;
- w.emit('drain');
- }
+ t.plan(3)
+ const r = new Readable()
+ const N = 256
+ let reads = 0
+
+ r._read = function (n) {
+ return r.push(++reads === N ? null : Buffer.alloc(1))
+ }
+ r.on('end', function () {
+ t.ok(true, 'rended')
+ })
+ const w = new Stream()
+ w.writable = true
+ let writes = 0
+ let buffered = 0
- w.end = function() {
- t.ok(true, 'wended');
- };
+ w.write = function (c) {
+ writes += c.length
+ buffered += c.length
+ process.nextTick(drain)
+ return false
+ }
- // Just for kicks, let's mess with the drain count.
- // This verifies that even if it gets negative in the
- // pipe() cleanup function, we'll still function properly.
- r.on('readable', function() {
- w.emit('drain');
- });
+ function drain() {
+ if (buffered > 3) {
+ t.ok(false, 'to much buffer')
+ }
+
+ buffered = 0
+ w.emit('drain')
+ }
- r.pipe(w);
-});
+ w.end = function () {
+ t.equal(writes, 255)
+ t.ok(true, 'wended')
+ }
+
+ r.pipe(w)
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-legacy-drain'
diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js
index 2a6d4f0f32..7a3e56b773 100644
--- a/test/browser/test-stream2-readable-non-empty-end.js
+++ b/test/browser/test-stream2-readable-non-empty-end.js
@@ -1,57 +1,63 @@
-'use strict';
-var common = require('../common');
-var Readable = require('../../lib/_stream_readable');
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName } = require('./symbols')
+
module.exports = function (t) {
- t.test('non empty end', function (t) {
- t.plan(4);
- var len = 0;
- var chunks = new Array(10);
- for (var i = 1; i <= 10; i++) {
- chunks[i - 1] = Buffer.alloc(i);
- len += i;
- }
+ t.plan(4)
+ let len = 0
+ const chunks = new Array(10)
- var test = new Readable();
- var n = 0;
- test._read = function(size) {
- var chunk = chunks[n++];
- setTimeout(function() {
- test.push(chunk === undefined ? null : chunk);
- });
- };
-
- test.on('end', thrower);
- function thrower() {
- throw new Error('this should not happen!');
- }
+ for (let i = 1; i <= 10; i++) {
+ chunks[i - 1] = Buffer.alloc(i)
+ len += i
+ }
+
+ const test = new Readable()
+ let n = 0
+
+ test._read = function (size) {
+ const chunk = chunks[n++]
+ setTimeout(function () {
+ test.push(chunk === undefined ? null : chunk)
+ })
+ }
+
+ test.on('end', thrower)
- var bytesread = 0;
- test.on('readable', function() {
- var b = len - bytesread - 1;
- var res = test.read(b);
- if (res) {
- bytesread += res.length;
- //console.error('br=%d len=%d', bytesread, len);
- setTimeout(next);
- }
- test.read(0);
- });
- test.read(0);
-
- function next() {
- // now let's make 'end' happen
- test.removeListener('end', thrower);
-
- test.on('end', function() {
- t.ok(true, 'end emitted');
- });
-
- // one to get the last byte
- var r = test.read();
- t.ok(r);
- t.equal(r.length, 1);
- r = test.read();
- t.equal(r, null);
+ function thrower() {
+ throw new Error('this should not happen!')
+ }
+
+ let bytesread = 0
+ test.on('readable', function () {
+ const b = len - bytesread - 1
+ const res = test.read(b)
+
+ if (res) {
+ bytesread += res.length // console.error('br=%d len=%d', bytesread, len);
+
+ setTimeout(next)
}
- });
+
+ test.read(0)
+ })
+ test.read(0)
+
+ function next() {
+ // now let's make 'end' happen
+ test.removeListener('end', thrower)
+ test.on('end', function () {
+ t.ok(true, 'end emitted')
+ }) // one to get the last byte
+
+ let r = test.read()
+ t.ok(r)
+ t.equal(r.length, 1)
+ r = test.read()
+ t.equal(r, null)
+ }
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-non-empty-end'
diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js
index d13bbbadbe..40b4b8db87 100644
--- a/test/browser/test-stream2-readable-wrap-empty.js
+++ b/test/browser/test-stream2-readable-wrap-empty.js
@@ -1,24 +1,26 @@
-'use strict';
-var common = require('../common');
+'use strict'
-var Readable = require('../../lib/_stream_readable');
-var EE = require('events').EventEmitter;
-module.exports = function (t) {
- t.test('wrap empty', function (t) {
- t.plan(1);
- var oldStream = new EE();
- oldStream.pause = function() {};
- oldStream.resume = function() {};
+const { EventEmitter: EE } = require('events')
+
+const Readable = require('../../lib/ours/index')
- var newStream = new Readable().wrap(oldStream);
+const { kReadableStreamSuiteName } = require('./symbols')
- newStream
- .on('readable', function() {})
- .on('end', function() {
- t.ok(true, 'ended');
- });
+module.exports = function (t) {
+ t.plan(1)
+ const oldStream = new EE()
- oldStream.emit('end');
+ oldStream.pause = function () {}
- })
+ oldStream.resume = function () {}
+
+ const newStream = new Readable().wrap(oldStream)
+ newStream
+ .on('readable', function () {})
+ .on('end', function () {
+ t.ok(true, 'ended')
+ })
+ oldStream.emit('end')
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap-empty'
diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js
index b50a786e5b..73a21b400a 100644
--- a/test/browser/test-stream2-readable-wrap.js
+++ b/test/browser/test-stream2-readable-wrap.js
@@ -1,86 +1,117 @@
-'use strict';
-var common = require('../common');
-
-var Readable = require('../../lib/_stream_readable');
-var Writable = require('../../lib/_stream_writable');
-var EE = require('events').EventEmitter;
-var run = 0;
-function runTest(t, highWaterMark, objectMode, produce) {
- t.test('run #' + (++run), function (t) {
- var old = new EE();
- var r = new Readable({ highWaterMark: highWaterMark,
- objectMode: objectMode });
- t.equal(r, r.wrap(old));
-
- var ended = false;
- r.on('end', function() {
- ended = true;
- });
-
- old.pause = function() {
- //console.error('old.pause()');
- old.emit('pause');
- flowing = false;
- };
-
- old.resume = function() {
- //console.error('old.resume()');
- old.emit('resume');
- flow();
- };
-
- var flowing;
- var chunks = 10;
- var oldEnded = false;
- var expected = [];
- function flow() {
- flowing = true;
- while (flowing && chunks-- > 0) {
- var item = produce();
- expected.push(item);
- //console.log('old.emit', chunks, flowing);
- old.emit('data', item);
- //console.log('after emit', chunks, flowing);
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+
+const { Readable, Writable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+let run = 0
+
+module.exports = function (test) {
+ function runTest(highWaterMark, objectMode, produce) {
+ test('run #' + ++run, (t) => {
+ t.plan(4)
+ const old = new EE()
+ const r = new Readable({
+ highWaterMark,
+ objectMode
+ })
+ t.equal(r, r.wrap(old))
+ let ended = false
+ r.on('end', function () {
+ ended = true
+ })
+
+ old.pause = function () {
+ // console.error('old.pause()');
+ old.emit('pause')
+ flowing = false
}
- if (chunks <= 0) {
- oldEnded = true;
- //console.log('old end', chunks, flowing);
- old.emit('end');
+
+ old.resume = function () {
+ // console.error('old.resume()');
+ old.emit('resume')
+ flow()
}
- }
- var w = new Writable({ highWaterMark: highWaterMark * 2,
- objectMode: objectMode });
- var written = [];
- w._write = function(chunk, encoding, cb) {
- //console.log('_write', chunk);
- written.push(chunk);
- setTimeout(cb);
- };
+ let flowing
+ let chunks = 10
+ let oldEnded = false
+ const expected = []
- w.on('finish', function() {
- performAsserts();
- });
+ function flow() {
+ flowing = true // eslint-disable-next-line no-unmodified-loop-condition
- r.pipe(w);
+ while (flowing && chunks-- > 0) {
+ const item = produce()
+ expected.push(item) // console.log('old.emit', chunks, flowing);
- flow();
+ old.emit('data', item) // console.log('after emit', chunks, flowing);
+ }
- function performAsserts() {
- t.ok(ended);
- t.ok(oldEnded);
- t.deepEqual(written, expected);
- t.end();
+ if (chunks <= 0) {
+ oldEnded = true // console.log('old end', chunks, flowing);
+
+ old.emit('end')
+ }
+ }
+
+ const w = new Writable({
+ highWaterMark: highWaterMark * 2,
+ objectMode
+ })
+ const written = []
+
+ w._write = function (chunk, encoding, cb) {
+ // console.log('_write', chunk);
+ written.push(chunk)
+ setTimeout(cb)
+ }
+
+ w.on('finish', function () {
+ performAsserts()
+ })
+ r.pipe(w)
+ flow()
+
+ function performAsserts() {
+ t.ok(ended)
+ t.ok(oldEnded)
+ t.deepEqual(written, expected)
+ }
+ })
+ }
+
+ runTest(100, false, function () {
+ return Buffer.alloc(100)
+ })
+ runTest(10, false, function () {
+ return Buffer.from('xxxxxxxxxx')
+ })
+ runTest(1, true, function () {
+ return {
+ foo: 'bar'
}
- });
-}
-module.exports = function (t) {
- t.test('readable wrap', function (t) {
- runTest(t, 100, false, function() { return Buffer.alloc(100); });
- runTest(t, 10, false, function() { return Buffer.from('xxxxxxxxxx'); });
- runTest(t, 1, true, function() { return { foo: 'bar' }; });
-
- var objectChunks = [ 5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555 ];
- runTest(t, 1, true, function() { return objectChunks.shift(); });
- });
+ })
+ const objectChunks = [
+ 5,
+ 'a',
+ false,
+ 0,
+ '',
+ 'xyz',
+ {
+ x: 4
+ },
+ 7,
+ [],
+ 555
+ ]
+ runTest(1, true, function () {
+ return objectChunks.shift()
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js
index ba70748fa5..c1ef6cd2eb 100644
--- a/test/browser/test-stream2-set-encoding.js
+++ b/test/browser/test-stream2-set-encoding.js
@@ -1,317 +1,323 @@
-'use strict';
-var common = require('../common');
-var R = require('../../lib/_stream_readable');
-var util = {
- inherits: require('inherits')
-};
+'use strict'
-// tiny node-tap lookalike.
-module.exports = function (t) {
- var test = t.test;
- /////
+const inherits = require('inherits')
- util.inherits(TestReader, R);
+const { Readable } = require('../../lib/ours/index')
- function TestReader(n, opts) {
- R.call(this, opts);
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
- this.pos = 0;
- this.len = n || 100;
- }
+inherits(TestReader, Readable)
- TestReader.prototype._read = function(n) {
- setTimeout(function() {
+function TestReader(n, opts) {
+ Readable.call(this, opts)
+ this.pos = 0
+ this.len = n || 100
+}
+TestReader.prototype._read = function (n) {
+ setTimeout(
+ function () {
if (this.pos >= this.len) {
// double push(null) to test eos handling
- this.push(null);
- return this.push(null);
+ this.push(null)
+ return this.push(null)
}
- n = Math.min(n, this.len - this.pos);
+ n = Math.min(n, this.len - this.pos)
+
if (n <= 0) {
// double push(null) to test eos handling
- this.push(null);
- return this.push(null);
+ this.push(null)
+ return this.push(null)
}
- this.pos += n;
- var ret = Buffer.alloc(n);
- ret.fill('a');
-
- //console.log('this.push(ret)', ret);
-
- return this.push(ret);
- }.bind(this), 1);
- };
+ this.pos += n
+ const ret = Buffer.alloc(n)
+ ret.fill('a') // console.log('this.push(ret)', ret);
- test('setEncoding utf8', function(t) {
- var tr = new TestReader(100);
- tr.setEncoding('utf8');
- var out = [];
- var expect =
- [ 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa' ];
+ return this.push(ret)
+ }.bind(this),
+ 1
+ )
+}
+module.exports = function (test) {
+ test('setEncoding utf8', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('utf8')
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(10)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
-
- test('setEncoding hex', function(t) {
- var tr = new TestReader(100);
- tr.setEncoding('hex');
- var out = [];
- var expect =
- [ '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161' ];
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('setEncoding hex', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(10)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
- test('setEncoding hex with read(13)', function(t) {
- var tr = new TestReader(100);
- tr.setEncoding('hex');
- var out = [];
- var expect =
- [ '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '16161' ];
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('setEncoding hex with read(13)', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
tr.on('readable', function flow() {
- //console.log('readable once');
- var chunk;
- while (null !== (chunk = tr.read(13)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- //console.log('END');
- t.same(out, expect);
- t.end();
- });
- });
-
- test('setEncoding base64', function(t) {
- var tr = new TestReader(100);
- tr.setEncoding('base64');
- var out = [];
- var expect =
- [ 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYQ==' ];
+ // console.log('readable once');
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ // console.log('END');
+ t.same(out, expect)
+ })
+ })
+ test('setEncoding base64', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('base64')
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(10)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
- test('encoding: utf8', function(t) {
- var tr = new TestReader(100, { encoding: 'utf8' });
- var out = [];
- var expect =
- [ 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa' ];
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: utf8', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'utf8'
+ })
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(10)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
-
- test('encoding: hex', function(t) {
- var tr = new TestReader(100, { encoding: 'hex' });
- var out = [];
- var expect =
- [ '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161' ];
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: hex', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'hex'
+ })
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(10)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
- test('encoding: hex with read(13)', function(t) {
- var tr = new TestReader(100, { encoding: 'hex' });
- var out = [];
- var expect =
- [ '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '16161' ];
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: hex with read(13)', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'hex'
+ })
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(13)))
- out.push(chunk);
- });
-
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
- test('encoding: base64', function(t) {
- var tr = new TestReader(100, { encoding: 'base64' });
- var out = [];
- var expect =
- [ 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYQ==' ];
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: base64', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'base64'
+ })
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
tr.on('readable', function flow() {
- var chunk;
- while (null !== (chunk = tr.read(10)))
- out.push(chunk);
- });
+ let chunk
- tr.on('end', function() {
- t.same(out, expect);
- t.end();
- });
- });
-
- test('chainable', function(t) {
- var tr = new TestReader(100);
- t.equal(tr.setEncoding('utf8'), tr);
- t.end();
- });
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('chainable', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ t.equal(tr.setEncoding('utf8'), tr)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-set-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js
index 338fcdc5c0..613e4b1334 100644
--- a/test/browser/test-stream2-transform.js
+++ b/test/browser/test-stream2-transform.js
@@ -1,473 +1,479 @@
-'use strict';
-var common = require('../common');
-var PassThrough = require('../../lib/_stream_passthrough');
-var Transform = require('../../lib/_stream_transform');
-
-/////
-module.exports = function (t) {
- t.test('writable side consumption', function(t) {
- var tx = new Transform({
+'use strict'
+
+const { PassThrough, Transform } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('writable side consumption', function (t) {
+ t.plan(3)
+ const tx = new Transform({
highWaterMark: 10
- });
+ })
+ let transformed = 0
+
+ tx._transform = function (chunk, encoding, cb) {
+ transformed += chunk.length
+ tx.push(chunk)
+ cb()
+ }
+
+ for (let i = 1; i <= 10; i++) {
+ tx.write(Buffer.alloc(i))
+ }
+
+ tx.end()
+ t.equal(tx._readableState.length, 10)
+ t.equal(transformed, 10)
+ t.same(
+ tx._writableState.getBuffer().map(function (c) {
+ return c.chunk.length
+ }),
+ [5, 6, 7, 8, 9, 10]
+ )
+ })
+ test('passthrough', function (t) {
+ t.plan(4)
+ const pt = new PassThrough()
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+ test('object passthrough', function (t) {
+ t.plan(7)
+ const pt = new PassThrough({
+ objectMode: true
+ })
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
+ a: 'b'
+ })
+ pt.end()
+ t.equal(pt.read(), 1)
+ t.equal(pt.read(), true)
+ t.equal(pt.read(), false)
+ t.equal(pt.read(), 0)
+ t.equal(pt.read(), 'foo')
+ t.equal(pt.read(), '')
+ t.same(pt.read(), {
+ a: 'b'
+ })
+ })
+ test('simple transform', function (t) {
+ t.plan(4)
+ const pt = new Transform()
+
+ pt._transform = function (c, e, cb) {
+ const ret = Buffer.alloc(c.length)
+ ret.fill('x')
+ pt.push(ret)
+ cb()
+ }
- var transformed = 0;
- tx._transform = function(chunk, encoding, cb) {
- transformed += chunk.length;
- tx.push(chunk);
- cb();
- };
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'x')
+ })
+ test('simple object transform', function (t) {
+ t.plan(7)
+ const pt = new Transform({
+ objectMode: true
+ })
+
+ pt._transform = function (c, e, cb) {
+ pt.push(JSON.stringify(c))
+ cb()
+ }
+
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
+ a: 'b'
+ })
+ pt.end()
+ t.equal(pt.read(), '1')
+ t.equal(pt.read(), 'true')
+ t.equal(pt.read(), 'false')
+ t.equal(pt.read(), '0')
+ t.equal(pt.read(), '"foo"')
+ t.equal(pt.read(), '""')
+ t.equal(pt.read(), '{"a":"b"}')
+ })
+ test('async passthrough', function (t) {
+ t.plan(4)
+ const pt = new Transform()
+
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }
- for (var i = 1; i <= 10; i++) {
- tx.write(Buffer.alloc(i));
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+ })
+ test('assymetric transform (expand)', function (t) {
+ t.plan(7)
+ const pt = new Transform() // emit each chunk 2 times.
+
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }, 10)
}
- tx.end();
-
- t.equal(tx._readableState.length, 10);
- t.equal(transformed, 10);
- t.equal(tx._transformState.writechunk.length, 5);
- t.same(tx._writableState.getBuffer().map(function(c) {
- return c.chunk.length;
- }), [6, 7, 8, 9, 10]);
-
- t.end();
- });
-
- t.test('passthrough', function(t) {
- var pt = new PassThrough();
-
- pt.write(Buffer.from('foog'));
- pt.write(Buffer.from('bark'));
- pt.write(Buffer.from('bazy'));
- pt.write(Buffer.from('kuel'));
- pt.end();
-
- t.equal(pt.read(5).toString(), 'foogb');
- t.equal(pt.read(5).toString(), 'arkba');
- t.equal(pt.read(5).toString(), 'zykue');
- t.equal(pt.read(5).toString(), 'l');
- t.end();
- });
-
- t.test('object passthrough', function(t) {
- var pt = new PassThrough({ objectMode: true });
-
- pt.write(1);
- pt.write(true);
- pt.write(false);
- pt.write(0);
- pt.write('foo');
- pt.write('');
- pt.write({ a: 'b'});
- pt.end();
-
- t.equal(pt.read(), 1);
- t.equal(pt.read(), true);
- t.equal(pt.read(), false);
- t.equal(pt.read(), 0);
- t.equal(pt.read(), 'foo');
- t.equal(pt.read(), '');
- t.same(pt.read(), { a: 'b'});
- t.end();
- });
-
- t.test('simple transform', function(t) {
- var pt = new Transform();
- pt._transform = function(c, e, cb) {
- var ret = Buffer.alloc(c.length);
- ret.fill('x');
- pt.push(ret);
- cb();
- };
-
- pt.write(Buffer.from('foog'));
- pt.write(Buffer.from('bark'));
- pt.write(Buffer.from('bazy'));
- pt.write(Buffer.from('kuel'));
- pt.end();
-
- t.equal(pt.read(5).toString(), 'xxxxx');
- t.equal(pt.read(5).toString(), 'xxxxx');
- t.equal(pt.read(5).toString(), 'xxxxx');
- t.equal(pt.read(5).toString(), 'x');
- t.end();
- });
-
- t.test('simple object transform', function(t) {
- var pt = new Transform({ objectMode: true });
- pt._transform = function(c, e, cb) {
- pt.push(JSON.stringify(c));
- cb();
- };
-
- pt.write(1);
- pt.write(true);
- pt.write(false);
- pt.write(0);
- pt.write('foo');
- pt.write('');
- pt.write({ a: 'b'});
- pt.end();
-
- t.equal(pt.read(), '1');
- t.equal(pt.read(), 'true');
- t.equal(pt.read(), 'false');
- t.equal(pt.read(), '0');
- t.equal(pt.read(), '"foo"');
- t.equal(pt.read(), '""');
- t.equal(pt.read(), '{"a":"b"}');
- t.end();
- });
-
- t.test('async passthrough', function(t) {
- var pt = new Transform();
- pt._transform = function(chunk, encoding, cb) {
- setTimeout(function() {
- pt.push(chunk);
- cb();
- }, 10);
- };
-
- pt.write(Buffer.from('foog'));
- pt.write(Buffer.from('bark'));
- pt.write(Buffer.from('bazy'));
- pt.write(Buffer.from('kuel'));
- pt.end();
-
- pt.on('finish', function() {
- t.equal(pt.read(5).toString(), 'foogb');
- t.equal(pt.read(5).toString(), 'arkba');
- t.equal(pt.read(5).toString(), 'zykue');
- t.equal(pt.read(5).toString(), 'l');
- t.end();
- });
- });
-
- t.test('assymetric transform (expand)', function(t) {
- var pt = new Transform();
-
- // emit each chunk 2 times.
- pt._transform = function(chunk, encoding, cb) {
- setTimeout(function() {
- pt.push(chunk);
- setTimeout(function() {
- pt.push(chunk);
- cb();
- }, 10);
- }, 10);
- };
-
- pt.write(Buffer.from('foog'));
- pt.write(Buffer.from('bark'));
- pt.write(Buffer.from('bazy'));
- pt.write(Buffer.from('kuel'));
- pt.end();
-
- pt.on('finish', function() {
- t.equal(pt.read(5).toString(), 'foogf');
- t.equal(pt.read(5).toString(), 'oogba');
- t.equal(pt.read(5).toString(), 'rkbar');
- t.equal(pt.read(5).toString(), 'kbazy');
- t.equal(pt.read(5).toString(), 'bazyk');
- t.equal(pt.read(5).toString(), 'uelku');
- t.equal(pt.read(5).toString(), 'el');
- t.end();
- });
- });
-
- t.test('assymetric transform (compress)', function(t) {
- var pt = new Transform();
-
- // each output is the first char of 3 consecutive chunks,
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogf')
+ t.equal(pt.read(5).toString(), 'oogba')
+ t.equal(pt.read(5).toString(), 'rkbar')
+ t.equal(pt.read(5).toString(), 'kbazy')
+ t.equal(pt.read(5).toString(), 'bazyk')
+ t.equal(pt.read(5).toString(), 'uelku')
+ t.equal(pt.read(5).toString(), 'el')
+ })
+ })
+ test('assymetric transform (compress)', function (t) {
+ t.plan(3)
+ const pt = new Transform() // each output is the first char of 3 consecutive chunks,
// or whatever's left.
- pt.state = '';
-
- pt._transform = function(chunk, encoding, cb) {
- if (!chunk)
- chunk = '';
- var s = chunk.toString();
- setTimeout(function() {
- this.state += s.charAt(0);
- if (this.state.length === 3) {
- pt.push(Buffer.from(this.state));
- this.state = '';
- }
- cb();
- }.bind(this), 10);
- };
- pt._flush = function(cb) {
+ pt.state = ''
+
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) {
+ chunk = ''
+ }
+
+ const s = chunk.toString()
+ setTimeout(
+ function () {
+ this.state += s.charAt(0)
+
+ if (this.state.length === 3) {
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ }
+
+ cb()
+ }.bind(this),
+ 10
+ )
+ }
+
+ pt._flush = function (cb) {
// just output whatever we have.
- pt.push(Buffer.from(this.state));
- this.state = '';
- cb();
- };
-
- pt.write(Buffer.from('aaaa'));
- pt.write(Buffer.from('bbbb'));
- pt.write(Buffer.from('cccc'));
- pt.write(Buffer.from('dddd'));
- pt.write(Buffer.from('eeee'));
- pt.write(Buffer.from('aaaa'));
- pt.write(Buffer.from('bbbb'));
- pt.write(Buffer.from('cccc'));
- pt.write(Buffer.from('dddd'));
- pt.write(Buffer.from('eeee'));
- pt.write(Buffer.from('aaaa'));
- pt.write(Buffer.from('bbbb'));
- pt.write(Buffer.from('cccc'));
- pt.write(Buffer.from('dddd'));
- pt.end();
-
- // 'abcdeabcdeabcd'
- pt.on('finish', function() {
- t.equal(pt.read(5).toString(), 'abcde');
- t.equal(pt.read(5).toString(), 'abcde');
- t.equal(pt.read(5).toString(), 'abcd');
- t.end();
- });
- });
-
- // this tests for a stall when data is written to a full stream
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ cb()
+ }
+
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.end() // 'abcdeabcdeabcd'
+
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcd')
+ })
+ }) // this tests for a stall when data is written to a full stream
// that has empty transforms.
- t.test('complex transform', function(t) {
- var count = 0;
- var saved = null;
- var pt = new Transform({highWaterMark:3});
- pt._transform = function(c, e, cb) {
- if (count++ === 1)
- saved = c;
- else {
+
+ test('complex transform', function (t) {
+ t.plan(2)
+ let count = 0
+ let saved = null
+ const pt = new Transform({
+ highWaterMark: 3
+ })
+
+ pt._transform = function (c, e, cb) {
+ if (count++ === 1) {
+ saved = c
+ } else {
if (saved) {
- pt.push(saved);
- saved = null;
+ pt.push(saved)
+ saved = null
}
- pt.push(c);
+
+ pt.push(c)
}
- cb();
- };
-
- pt.once('readable', function() {
- process.nextTick(function() {
- pt.write(Buffer.from('d'));
- pt.write(Buffer.from('ef'), function() {
- pt.end();
- t.end();
- });
- t.equal(pt.read().toString(), 'abcdef');
- t.equal(pt.read(), null);
- });
- });
-
- pt.write(Buffer.from('abc'));
- });
-
-
- t.test('passthrough event emission', function(t) {
- var pt = new PassThrough();
- var emits = 0;
- pt.on('readable', function() {
- var state = pt._readableState;
- //console.error('>>> emit readable %d', emits);
- emits++;
- });
-
- var i = 0;
-
- pt.write(Buffer.from('foog'));
-
- //console.error('need emit 0');
- pt.write(Buffer.from('bark'));
-
- //console.error('should have emitted readable now 1 === %d', emits);
- t.equal(emits, 1);
-
- t.equal(pt.read(5).toString(), 'foogb');
- t.equal(pt.read(5) + '', 'null');
-
- //console.error('need emit 1');
-
- pt.write(Buffer.from('bazy'));
- //console.error('should have emitted, but not again');
- pt.write(Buffer.from('kuel'));
-
- //console.error('should have emitted readable now 2 === %d', emits);
- t.equal(emits, 2);
-
- t.equal(pt.read(5).toString(), 'arkba');
- t.equal(pt.read(5).toString(), 'zykue');
- t.equal(pt.read(5), null);
-
- //console.error('need emit 2');
-
- pt.end();
-
- t.equal(emits, 3);
-
- t.equal(pt.read(5).toString(), 'l');
- t.equal(pt.read(5), null);
-
- //console.error('should not have emitted again');
- t.equal(emits, 3);
- t.end();
- });
-
- t.test('passthrough event emission reordered', function(t) {
- var pt = new PassThrough();
- var emits = 0;
- pt.on('readable', function() {
- //console.error('emit readable', emits);
- emits++;
- });
-
- pt.write(Buffer.from('foog'));
- //console.error('need emit 0');
- pt.write(Buffer.from('bark'));
- //console.error('should have emitted readable now 1 === %d', emits);
- t.equal(emits, 1);
-
- t.equal(pt.read(5).toString(), 'foogb');
- t.equal(pt.read(5), null);
-
- //console.error('need emit 1');
- pt.once('readable', function() {
- t.equal(pt.read(5).toString(), 'arkba');
-
- t.equal(pt.read(5), null);
-
- //console.error('need emit 2');
- pt.once('readable', function() {
- t.equal(pt.read(5).toString(), 'zykue');
- t.equal(pt.read(5), null);
- pt.once('readable', function() {
- t.equal(pt.read(5).toString(), 'l');
- t.equal(pt.read(5), null);
- t.equal(emits, 4);
- t.end();
- });
- pt.end();
- });
- pt.write(Buffer.from('kuel'));
- });
-
- pt.write(Buffer.from('bazy'));
- });
-
- t.test('passthrough facaded', function(t) {
- //console.error('passthrough facaded');
- var pt = new PassThrough();
- var datas = [];
- pt.on('data', function(chunk) {
- datas.push(chunk.toString());
- });
-
- pt.on('end', function() {
- t.same(datas, ['foog', 'bark', 'bazy', 'kuel']);
- t.end();
- });
-
- pt.write(Buffer.from('foog'));
- setTimeout(function() {
- pt.write(Buffer.from('bark'));
- setTimeout(function() {
- pt.write(Buffer.from('bazy'));
- setTimeout(function() {
- pt.write(Buffer.from('kuel'));
- setTimeout(function() {
- pt.end();
- }, 10);
- }, 10);
- }, 10);
- }, 10);
- });
-
- t.test('object transform (json parse)', function(t) {
- //console.error('json parse stream');
- var jp = new Transform({ objectMode: true });
- jp._transform = function(data, encoding, cb) {
+ cb()
+ }
+
+ pt.once('readable', function () {
+ process.nextTick(function () {
+ pt.write(Buffer.from('d'))
+ pt.write(Buffer.from('ef'), function () {
+ pt.end()
+ })
+ t.equal(pt.read().toString(), 'abcdef')
+ t.equal(pt.read(), null)
+ })
+ })
+ pt.write(Buffer.from('abc'))
+ })
+ test('passthrough event emission', function (t) {
+ t.plan(11)
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('>>> emit readable %d', emits);
+ emits++
+ })
+ pt.write(Buffer.from('foog')) // console.error('need emit 0');
+
+ pt.write(Buffer.from('bark'))
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits)
+ t.equal(emits, 1)
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5) + '', 'null') // console.error('need emit 1');
+
+ pt.write(Buffer.from('bazy')) // console.error('should have emitted, but not again');
+
+ pt.write(Buffer.from('kuel')) // console.error('should have emitted readable now 2 === %d', emits);
+
+ setTimeout(() => {
+ t.equal(emits, 2)
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null) // console.error('need emit 2');
+
+ pt.end()
+ setTimeout(() => {
+ t.equal(emits, 3)
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null) // console.error('should not have emitted again');
+
+ t.equal(emits, 3)
+ })
+ })
+ })
+ })
+ test('passthrough event emission reordered', function (t) {
+ t.plan(10)
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('emit readable', emits);
+ emits++
+ })
+ pt.write(Buffer.from('foog')) // console.error('need emit 0');
+
+ pt.write(Buffer.from('bark'))
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1)
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5), null) // console.error('need emit 1');
+
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5), null) // console.error('need emit 2');
+
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+ t.equal(emits, 4)
+ })
+ pt.end()
+ })
+ pt.write(Buffer.from('kuel'))
+ })
+ pt.write(Buffer.from('bazy'))
+ })
+ })
+ test('passthrough facaded', function (t) {
+ t.plan(1) // console.error('passthrough facaded');
+
+ const pt = new PassThrough()
+ const datas = []
+ pt.on('data', function (chunk) {
+ datas.push(chunk.toString())
+ })
+ pt.on('end', function () {
+ t.same(datas, ['foog', 'bark', 'bazy', 'kuel'])
+ })
+ pt.write(Buffer.from('foog'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bark'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bazy'))
+ setTimeout(function () {
+ pt.write(Buffer.from('kuel'))
+ setTimeout(function () {
+ pt.end()
+ }, 10)
+ }, 10)
+ }, 10)
+ }, 10)
+ })
+ test('object transform (json parse)', function (t) {
+ t.plan(5) // console.error('json parse stream');
+
+ const jp = new Transform({
+ objectMode: true
+ })
+
+ jp._transform = function (data, encoding, cb) {
try {
- jp.push(JSON.parse(data));
- cb();
+ jp.push(JSON.parse(data))
+ cb()
} catch (er) {
- cb(er);
+ cb(er)
}
- };
-
- // anything except null/undefined is fine.
+ } // anything except null/undefined is fine.
// those are "magic" in the stream API, because they signal EOF.
- var objects = [
- { foo: 'bar' },
+
+ const objects = [
+ {
+ foo: 'bar'
+ },
100,
'string',
- { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
- ];
-
- var ended = false;
- jp.on('end', function() {
- ended = true;
- });
-
- forEach(objects, function(obj) {
- jp.write(JSON.stringify(obj));
- var res = jp.read();
- t.same(res, obj);
- });
-
- jp.end();
- // read one more time to get the 'end' event
- jp.read();
-
- process.nextTick(function() {
- t.ok(ended);
- t.end();
- });
- });
-
- t.test('object transform (json stringify)', function(t) {
- //console.error('json parse stream');
- var js = new Transform({ objectMode: true });
- js._transform = function(data, encoding, cb) {
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
+ }
+ ]
+ let ended = false
+ jp.on('end', function () {
+ ended = true
+ })
+ forEach(objects, function (obj) {
+ jp.write(JSON.stringify(obj))
+ const res = jp.read()
+ t.same(res, obj)
+ })
+ jp.end() // read one more time to get the 'end' event
+
+ jp.read()
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+ test('object transform (json stringify)', function (t) {
+ t.plan(5) // console.error('json parse stream');
+
+ const js = new Transform({
+ objectMode: true
+ })
+
+ js._transform = function (data, encoding, cb) {
try {
- js.push(JSON.stringify(data));
- cb();
+ js.push(JSON.stringify(data))
+ cb()
} catch (er) {
- cb(er);
+ cb(er)
}
- };
-
- // anything except null/undefined is fine.
+ } // anything except null/undefined is fine.
// those are "magic" in the stream API, because they signal EOF.
- var objects = [
- { foo: 'bar' },
+
+ const objects = [
+ {
+ foo: 'bar'
+ },
100,
'string',
- { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
- ];
-
- var ended = false;
- js.on('end', function() {
- ended = true;
- });
-
- forEach(objects, function(obj) {
- js.write(obj);
- var res = js.read();
- t.equal(res, JSON.stringify(obj));
- });
-
- js.end();
- // read one more time to get the 'end' event
- js.read();
-
- process.nextTick(function() {
- t.ok(ended);
- t.end();
- });
- });
-
- function forEach (xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
+ }
+ ]
+ let ended = false
+ js.on('end', function () {
+ ended = true
+ })
+ forEach(objects, function (obj) {
+ js.write(obj)
+ const res = js.read()
+ t.equal(res, JSON.stringify(obj))
+ })
+ js.end() // read one more time to get the 'end' event
+
+ js.read()
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
}
}
-};
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-transform'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js
index d3ebbe86c0..52742520a6 100644
--- a/test/browser/test-stream2-unpipe-drain.js
+++ b/test/browser/test-stream2-unpipe-drain.js
@@ -1,65 +1,66 @@
-'use strict';
-var common = require('../common');
-var stream = require('../../');
+'use strict'
-var crypto = require('crypto');
+const crypto = require('crypto')
-var inherits = require('inherits');
-module.exports = function (t) {
- t.test('unpipe drain', function (t) {
- try {
- crypto.randomBytes(9);
- } catch(_) {
- t.ok(true, 'does not suport random, skipping');
- return t.end();
- }
- function TestWriter() {
- stream.Writable.call(this);
- }
- inherits(TestWriter, stream.Writable);
+const inherits = require('inherits')
- TestWriter.prototype._write = function(buffer, encoding, callback) {
- //console.log('write called');
- // super slow write stream (callback never called)
- };
+const stream = require('../../lib/ours/index')
- var dest = new TestWriter();
+const { kReadableStreamSuiteName } = require('./symbols')
- function TestReader(id) {
- stream.Readable.call(this);
- this.reads = 0;
- }
- inherits(TestReader, stream.Readable);
+module.exports = function (t) {
+ try {
+ crypto.randomBytes(9)
+ } catch (_) {
+ t.plan(1)
+ t.ok(true, 'does not suport random, skipping')
+ return
+ }
- TestReader.prototype._read = function(size) {
- this.reads += 1;
- this.push(crypto.randomBytes(size));
- };
+ t.plan(2)
- var src1 = new TestReader();
- var src2 = new TestReader();
+ function TestWriter() {
+ stream.Writable.call(this)
+ }
- src1.pipe(dest);
+ inherits(TestWriter, stream.Writable)
- src1.once('readable', function() {
- process.nextTick(function() {
+ TestWriter.prototype._write = function (buffer, encoding, callback) {
+ // console.log('write called');
+ // super slow write stream (callback never called)
+ }
- src2.pipe(dest);
+ const dest = new TestWriter()
- src2.once('readable', function() {
- process.nextTick(function() {
+ function TestReader(id) {
+ stream.Readable.call(this)
+ this.reads = 0
+ }
- src1.unpipe(dest);
- });
- });
- });
- });
+ inherits(TestReader, stream.Readable)
+ TestReader.prototype._read = function (size) {
+ this.reads += 1
+ this.push(crypto.randomBytes(size))
+ }
- dest.on('unpipe', function() {
- t.equal(src1.reads, 2);
- t.equal(src2.reads, 1);
- t.end();
- });
- });
+ const src1 = new TestReader()
+ const src2 = new TestReader()
+ src1.pipe(dest)
+ src1.once('readable', function () {
+ process.nextTick(function () {
+ src2.pipe(dest)
+ src2.once('readable', function () {
+ process.nextTick(function () {
+ src1.unpipe(dest)
+ })
+ })
+ })
+ })
+ dest.on('unpipe', function () {
+ t.equal(src1.reads, 2)
+ t.equal(src2.reads, 1)
+ })
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-unpipe-drain'
diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js
index ca08fdb1b9..0e5c9c7563 100644
--- a/test/browser/test-stream2-writable.js
+++ b/test/browser/test-stream2-writable.js
@@ -1,375 +1,415 @@
-'use strict';
-var common = require('../common');
-var W = require('../../lib/_stream_writable');
-var D = require('../../lib/_stream_duplex');
+'use strict'
-var inherits = require('inherits');
-inherits(TestWriter, W);
+const inherits = require('inherits')
+
+const { Duplex, Writable } = require('../../lib/ours/index')
+
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(TestWriter, Writable)
function TestWriter() {
- W.apply(this, arguments);
- this.buffer = [];
- this.written = 0;
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
}
-TestWriter.prototype._write = function(chunk, encoding, cb) {
+TestWriter.prototype._write = function (chunk, encoding, cb) {
// simulate a small unpredictable latency
- setTimeout(function() {
- this.buffer.push(chunk.toString());
- this.written += chunk.length;
- cb();
- }.bind(this), Math.floor(Math.random() * 10));
-};
-inherits(Processstdout, W);
+ setTimeout(
+ function () {
+ this.buffer.push(chunk.toString())
+ this.written += chunk.length
+ cb()
+ }.bind(this),
+ Math.floor(Math.random() * 10)
+ )
+}
+
+inherits(Processstdout, Writable)
function Processstdout() {
- W.apply(this, arguments);
- this.buffer = [];
- this.written = 0;
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
}
-Processstdout.prototype._write = function(chunk, encoding, cb) {
- //console.log(chunk.toString());
- cb();
-};
-var chunks = new Array(50);
-for (var i = 0; i < chunks.length; i++) {
- chunks[i] = new Array(i + 1).join('x');
+Processstdout.prototype._write = function (chunk, encoding, cb) {
+ // console.log(chunk.toString());
+ cb()
}
-module.exports = function (t) {
- var test = t.test;
+const chunks = new Array(50)
- if (!process.stdout) {
- process.stdout = new Processstdout();
- }
-
- test('write fast', function(t) {
- var tw = new TestWriter({
- highWaterMark: 100
- });
+for (let i = 0; i < chunks.length; i++) {
+ chunks[i] = new Array(i + 1).join('x')
+}
- tw.on('finish', function() {
- t.same(tw.buffer, chunks, 'got chunks in the right order');
- t.end();
- });
+if (!process.stdout) {
+ process.stdout = new Processstdout()
+}
- forEach(chunks, function(chunk) {
+module.exports = function (test) {
+ test('write fast', function (t) {
+ t.plan(1)
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+ forEach(chunks, function (chunk) {
// screw backpressure. Just buffer it all up.
- tw.write(chunk);
- });
- tw.end();
- });
-
- test('write slow', function(t) {
- var tw = new TestWriter({
+ tw.write(chunk)
+ })
+ tw.end()
+ })
+ test('write slow', function (t) {
+ t.plan(1)
+ const tw = new TestWriter({
highWaterMark: 100
- });
-
- tw.on('finish', function() {
- t.same(tw.buffer, chunks, 'got chunks in the right order');
- t.end();
- });
-
- var i = 0;
- (function W() {
- tw.write(chunks[i++]);
- if (i < chunks.length)
- setTimeout(W, 10);
- else
- tw.end();
- })();
- });
-
- test('write backpressure', function(t) {
- var tw = new TestWriter({
- highWaterMark: 50
- });
+ })
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+ let i = 0
- var drains = 0;
+ ;(function W() {
+ tw.write(chunks[i++])
- tw.on('finish', function() {
- t.same(tw.buffer, chunks, 'got chunks in the right order');
- t.equal(drains, 17);
- t.end();
- });
-
- tw.on('drain', function() {
- drains++;
- });
+ if (i < chunks.length) {
+ setTimeout(W, 10)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+ test('write backpressure', function (t) {
+ t.plan(19)
+ const tw = new TestWriter({
+ highWaterMark: 50
+ })
+ let drains = 0
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.equal(drains, 17)
+ })
+ tw.on('drain', function () {
+ drains++
+ })
+ let i = 0
+
+ ;(function W() {
+ let ret
- var i = 0;
- (function W() {
do {
- var ret = tw.write(chunks[i++]);
- } while (ret !== false && i < chunks.length);
+ ret = tw.write(chunks[i++])
+ } while (ret !== false && i < chunks.length)
if (i < chunks.length) {
- t.ok(tw._writableState.length >= 50);
- tw.once('drain', W);
+ t.ok(tw._writableState.length >= 50)
+ tw.once('drain', W)
} else {
- tw.end();
+ tw.end()
}
- })();
- });
-
- test('write bufferize', function(t) {
- var tw = new TestWriter({
+ })()
+ })
+ test('write bufferize', function (t) {
+ t.plan(50)
+ const tw = new TestWriter({
highWaterMark: 100
- });
-
- var encodings =
- [ 'hex',
- 'utf8',
- 'utf-8',
- 'ascii',
- 'binary',
- 'base64',
- 'ucs2',
- 'ucs-2',
- 'utf16le',
- 'utf-16le',
- undefined ];
-
- tw.on('finish', function() {
- t.same(tw.buffer, chunks, 'got the expected chunks');
- });
-
- forEach(chunks, function(chunk, i) {
- var enc = encodings[ i % encodings.length ];
- chunk = Buffer.from(chunk);
- tw.write(chunk.toString(enc), enc);
- });
- t.end();
- });
-
- test('write no bufferize', function(t) {
- var tw = new TestWriter({
+ })
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes
+
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ } // In some cases instead there is one byte less
+
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+ test('write no bufferize', function (t) {
+ t.plan(100)
+ const tw = new TestWriter({
highWaterMark: 100,
decodeStrings: false
- });
-
- tw._write = function(chunk, encoding, cb) {
- t.equals(typeof chunk, 'string');
- chunk = Buffer.from(chunk, encoding);
- return TestWriter.prototype._write.call(this, chunk, encoding, cb);
- };
-
- var encodings =
- [ 'hex',
- 'utf8',
- 'utf-8',
- 'ascii',
- 'binary',
- 'base64',
- 'ucs2',
- 'ucs-2',
- 'utf16le',
- 'utf-16le',
- undefined ];
-
- tw.on('finish', function() {
- t.same(tw.buffer, chunks, 'got the expected chunks');
- });
-
- forEach(chunks, function(chunk, i) {
- var enc = encodings[ i % encodings.length ];
- chunk = Buffer.from(chunk);
- tw.write(chunk.toString(enc), enc);
- });
- t.end();
- });
-
- test('write callbacks', function(t) {
- var callbacks = chunks.map(function(chunk, i) {
- return [i, function(er) {
- callbacks._called[i] = chunk;
- }];
- }).reduce(function(set, x) {
- set['callback-' + x[0]] = x[1];
- return set;
- }, {});
- callbacks._called = [];
-
- var tw = new TestWriter({
+ })
+
+ tw._write = function (chunk, encoding, cb) {
+ t.equals(typeof chunk, 'string')
+ chunk = Buffer.from(chunk, encoding)
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb)
+ }
+
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes
+
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ } // In some cases instead there is one byte less
+
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+ test('write callbacks', function (t) {
+ t.plan(2)
+ const callbacks = chunks
+ .map(function (chunk, i) {
+ return [
+ i,
+ function (er) {
+ callbacks._called[i] = chunk
+ }
+ ]
+ })
+ .reduce(function (set, x) {
+ set['callback-' + x[0]] = x[1]
+ return set
+ }, {})
+ callbacks._called = []
+ const tw = new TestWriter({
highWaterMark: 100
- });
-
- tw.on('finish', function() {
- process.nextTick(function() {
- t.same(tw.buffer, chunks, 'got chunks in the right order');
- t.same(callbacks._called, chunks, 'called all callbacks');
- t.end();
- });
- });
-
- forEach(chunks, function(chunk, i) {
- tw.write(chunk, callbacks['callback-' + i]);
- });
- tw.end();
- });
-
- test('end callback', function(t) {
- var tw = new TestWriter();
- tw.end(function() {
- t.end();
- });
- });
-
- test('end callback with chunk', function(t) {
- var tw = new TestWriter();
- tw.end(Buffer.from('hello world'), function() {
- t.end();
- });
- });
-
- test('end callback with chunk and encoding', function(t) {
- var tw = new TestWriter();
- tw.end('hello world', 'ascii', function() {
- t.end();
- });
- });
-
- test('end callback after .write() call', function(t) {
- var tw = new TestWriter();
- tw.write(Buffer.from('hello world'));
- tw.end(function() {
- t.end();
- });
- });
-
- test('end callback called after write callback', function(t) {
- var tw = new TestWriter();
- var writeCalledback = false;
- tw.write(Buffer.from('hello world'), function() {
- writeCalledback = true;
- });
- tw.end(function() {
- t.equal(writeCalledback, true);
- t.end();
- });
- });
-
- test('encoding should be ignored for buffers', function(t) {
- var tw = new W();
- var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb';
- tw._write = function(chunk, encoding, cb) {
- t.equal(chunk.toString('hex'), hex);
- t.end();
- };
- var buf = Buffer.from(hex, 'hex');
- tw.write(buf, 'binary');
- });
-
- test('writables are not pipable', function(t) {
- var w = new W();
- w._write = function() {};
- var gotError = false;
- w.on('error', function(er) {
- gotError = true;
- });
- w.pipe(process.stdout);
- t.ok(gotError);
- t.end();
- });
-
- test('duplexes are pipable', function(t) {
- var d = new D();
- d._read = function() {};
- d._write = function() {};
- var gotError = false;
- d.on('error', function(er) {
- gotError = true;
- });
- d.pipe(process.stdout);
- t.ok(!gotError);
- t.end();
- });
-
- test('end(chunk) two times is an error', function(t) {
- var w = new W();
- w._write = function() {};
- var gotError = false;
- w.on('error', function(er) {
- gotError = true;
- t.equal(er.message, 'write after end');
- });
- w.end('this is the end');
- w.end('and so is this');
- process.nextTick(function() {
- t.ok(gotError);
- t.end();
- });
- });
-
- test('dont end while writing', function(t) {
- var w = new W();
- var wrote = false;
- w._write = function(chunk, e, cb) {
- t.ok(!this.writing);
- wrote = true;
- this.writing = true;
- setTimeout(function() {
- this.writing = false;
- cb();
- });
- };
- w.on('finish', function() {
- t.ok(wrote);
- t.end();
- });
- w.write(Buffer(0));
- w.end();
- });
-
- test('finish does not come before write cb', function(t) {
- var w = new W();
- var writeCb = false;
- w._write = function(chunk, e, cb) {
- setTimeout(function() {
- writeCb = true;
- cb();
- }, 10);
- };
- w.on('finish', function() {
- t.ok(writeCb);
- t.end();
- });
- w.write(Buffer(0));
- w.end();
- });
-
- test('finish does not come before sync _write cb', function(t) {
- var w = new W();
- var writeCb = false;
- w._write = function(chunk, e, cb) {
- cb();
- };
- w.on('finish', function() {
- t.ok(writeCb);
- t.end();
- });
- w.write(Buffer(0), function(er) {
- writeCb = true;
- });
- w.end();
- });
-
- test('finish is emitted if last chunk is empty', function(t) {
- var w = new W();
- w._write = function(chunk, e, cb) {
- process.nextTick(cb);
- };
- w.on('finish', function() {
- t.end();
- });
- w.write(Buffer(1));
- w.end(Buffer(0));
- });
-
- function forEach (xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
+ })
+ tw.on('finish', function () {
+ process.nextTick(function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.same(callbacks._called, chunks, 'called all callbacks')
+ })
+ })
+ forEach(chunks, function (chunk, i) {
+ tw.write(chunk, callbacks['callback-' + i])
+ })
+ tw.end()
+ })
+ test('end callback', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+ test('end callback with chunk', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.end(Buffer.from('hello world'), () => {
+ t.ok(true)
+ })
+ })
+ test('end callback with chunk and encoding', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.end('hello world', 'ascii', () => {
+ t.ok(true)
+ })
+ })
+ test('end callback after .write() call', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.write(Buffer.from('hello world'))
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+ test('end callback called after write callback', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ let writeCalledback = false
+ tw.write(Buffer.from('hello world'), function () {
+ writeCalledback = true
+ })
+ tw.end(function () {
+ t.equal(writeCalledback, true)
+ })
+ })
+ test('encoding should be ignored for buffers', function (t) {
+ t.plan(1)
+ const tw = new Writable()
+ const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'
+
+ tw._write = function (chunk, encoding, cb) {
+ t.equal(chunk.toString('hex'), hex)
+ }
+
+ const buf = Buffer.from(hex, 'hex')
+ tw.write(buf, 'binary')
+ })
+ test('writables are not pipable', function (t) {
+ t.plan(1)
+ const w = new Writable({
+ autoDestroy: false
+ })
+
+ w._write = function () {}
+
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ })
+ w.pipe(process.stdout)
+ t.ok(gotError)
+ })
+ test('duplexes are pipable', function (t) {
+ t.plan(1)
+ const d = new Duplex()
+
+ d._read = function () {}
+
+ d._write = function () {}
+
+ let gotError = false
+ d.on('error', function (er) {
+ gotError = true
+ })
+ d.pipe(process.stdout)
+ t.notOk(gotError)
+ })
+ test('end(chunk) two times is an error', function (t) {
+ t.plan(2)
+ const w = new Writable()
+
+ w._write = function () {}
+
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ t.equal(er.message, 'write after end')
+ })
+ w.end('this is the end')
+ w.end('and so is this')
+ process.nextTick(function () {
+ t.ok(gotError)
+ })
+ })
+ test('dont end while writing', function (t) {
+ t.plan(2)
+ const w = new Writable()
+ let wrote = false
+
+ w._write = function (chunk, e, cb) {
+ t.notOk(this.writing)
+ wrote = true
+ this.writing = true
+ setTimeout(function () {
+ this.writing = false
+ cb()
+ })
+ }
+
+ w.on('finish', function () {
+ t.ok(wrote)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+ test('finish does not come before write cb', function (t) {
+ t.plan(1)
+ const w = new Writable()
+ let writeCb = false
+
+ w._write = function (chunk, e, cb) {
+ setTimeout(function () {
+ writeCb = true
+ cb()
+ }, 10)
+ }
+
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+ test('finish does not come before sync _write cb', function (t) {
+ t.plan(1)
+ const w = new Writable()
+ let writeCb = false
+
+ w._write = function (chunk, e, cb) {
+ cb()
+ }
+
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0), function (er) {
+ writeCb = true
+ })
+ w.end()
+ })
+ test('finish is emitted if last chunk is empty', function (t) {
+ t.plan(1)
+ const w = new Writable()
+
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+
+ w.on('finish', () => {
+ t.ok(true)
+ })
+ w.write(Buffer.alloc(1))
+ w.end(Buffer.alloc(0))
+ })
+
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
}
}
}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-writable'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js
index 9684ea7774..a2399f6184 100644
--- a/test/browser/test-stream3-pause-then-read.js
+++ b/test/browser/test-stream3-pause-then-read.js
@@ -1,150 +1,158 @@
-'use strict';
-var common = require('../common');
-
-var stream = require('../../');
-var Readable = stream.Readable;
-var Writable = stream.Writable;
-
-module.exports = function (t){
- t.test('pause then read', function (t) {
- var totalChunks = 100;
- var chunkSize = 99;
- var expectTotalData = totalChunks * chunkSize;
- var expectEndingData = expectTotalData;
-
- var r = new Readable({ highWaterMark: 1000 });
- var chunks = totalChunks;
- r._read = function(n) {
- if (!(chunks % 2))
- setImmediate(push);
- else if (!(chunks % 3))
- process.nextTick(push);
- else
- push();
- };
-
- var totalPushed = 0;
- function push() {
- var chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null;
- if (chunk) {
- totalPushed += chunk.length;
- chunk.fill('x');
- }
- r.push(chunk);
- }
+'use strict'
- read100();
+const { Readable, Writable } = require('../../lib/ours/index')
- // first we read 100 bytes
- function read100() {
- readn(100, onData);
- }
+const { kReadableStreamSuiteName } = require('./symbols')
- function readn(n, then) {
- //console.error('read %d', n);
- expectEndingData -= n;
- ;(function read() {
- var c = r.read(n);
- if (!c)
- r.once('readable', read);
- else {
- t.equal(c.length, n);
- t.notOk(r._readableState.flowing);
- then();
- }
- })();
+module.exports = function (t) {
+ t.plan(7)
+ const totalChunks = 100
+ const chunkSize = 99
+ const expectTotalData = totalChunks * chunkSize
+ let expectEndingData = expectTotalData
+ const r = new Readable({
+ highWaterMark: 1000
+ })
+ let chunks = totalChunks
+
+ r._read = function (n) {
+ if (!(chunks % 2)) {
+ setImmediate(push)
+ } else if (!(chunks % 3)) {
+ process.nextTick(push)
+ } else {
+ push()
}
+ }
- // then we listen to some data events
- function onData() {
- expectEndingData -= 100;
- //console.error('onData');
- var seen = 0;
- r.on('data', function od(c) {
- seen += c.length;
- if (seen >= 100) {
- // seen enough
- r.removeListener('data', od);
- r.pause();
- if (seen > 100) {
- // oh no, seen too much!
- // put the extra back.
- var diff = seen - 100;
- r.unshift(c.slice(c.length - diff));
- console.error('seen too much', seen, diff);
- }
-
- // Nothing should be lost in between
- setImmediate(pipeLittle);
- }
- });
+ let totalPushed = 0
+
+ function push() {
+ const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null
+
+ if (chunk) {
+ totalPushed += chunk.length
+ chunk.fill('x')
}
- // Just pipe 200 bytes, then unshift the extra and unpipe
- function pipeLittle() {
- expectEndingData -= 200;
- //console.error('pipe a little');
- var w = new Writable();
- var written = 0;
- w.on('finish', function() {
- t.equal(written, 200);
- setImmediate(read1234);
- });
- w._write = function(chunk, encoding, cb) {
- written += chunk.length;
- if (written >= 200) {
- r.unpipe(w);
- w.end();
- cb();
- if (written > 200) {
- var diff = written - 200;
- written -= diff;
- r.unshift(chunk.slice(chunk.length - diff));
- }
- } else {
- setImmediate(cb);
+ r.push(chunk)
+ }
+
+ read100() // first we read 100 bytes
+
+ function read100() {
+ readn(100, onData)
+ }
+
+ function readn(n, then) {
+ // console.error('read %d', n);
+ expectEndingData -= n
+
+ ;(function read() {
+ const c = r.read(n)
+
+ if (!c) {
+ r.once('readable', read)
+ } else {
+ t.equal(c.length, n)
+ t.notOk(r._readableState.flowing)
+ then()
+ }
+ })()
+ } // then we listen to some data events
+
+ function onData() {
+ expectEndingData -= 100 // console.error('onData');
+
+ let seen = 0
+ r.on('data', function od(c) {
+ seen += c.length
+
+ if (seen >= 100) {
+ // seen enough
+ r.removeListener('data', od)
+ r.pause()
+
+ if (seen > 100) {
+ // oh no, seen too much!
+ // put the extra back.
+ const diff = seen - 100
+ r.unshift(c.slice(c.length - diff)) // console.error('seen too much', seen, diff)
+ } // Nothing should be lost in between
+
+ setImmediate(pipeLittle)
+ }
+ })
+ } // Just pipe 200 bytes, then unshift the extra and unpipe
+
+ function pipeLittle() {
+ expectEndingData -= 200 // console.error('pipe a little');
+
+ const w = new Writable()
+ let written = 0
+ w.on('finish', function () {
+ t.equal(written, 200)
+ setImmediate(read1234)
+ })
+
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+
+ if (written >= 200) {
+ r.unpipe(w)
+ w.end()
+ cb()
+
+ if (written > 200) {
+ const diff = written - 200
+ written -= diff
+ r.unshift(chunk.slice(chunk.length - diff))
}
- };
- r.pipe(w);
+ } else {
+ setImmediate(cb)
+ }
}
- // now read 1234 more bytes
- function read1234() {
- readn(1234, resumePause);
- }
+ r.pipe(w)
+ } // now read 1234 more bytes
- function resumePause() {
- //console.error('resumePause');
- // don't read anything, just resume and re-pause a whole bunch
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- setImmediate(pipe);
- }
+ function read1234() {
+ readn(1234, resumePause)
+ }
+ function resumePause() {
+ // console.error('resumePause');
+ // don't read anything, just resume and re-pause a whole bunch
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ setImmediate(pipe)
+ }
- function pipe() {
- //console.error('pipe the rest');
- var w = new Writable();
- var written = 0;
- w._write = function(chunk, encoding, cb) {
- written += chunk.length;
- cb();
- };
- w.on('finish', function() {
- //console.error('written', written, totalPushed);
- t.equal(written, expectEndingData);
- t.equal(totalPushed, expectTotalData);
- t.end();
- });
- r.pipe(w);
+ function pipe() {
+ // console.error('pipe the rest');
+ const w = new Writable()
+ let written = 0
+
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ cb()
}
- });
+
+ w.on('finish', function () {
+ // console.error('written', written, totalPushed);
+ t.equal(written, expectEndingData)
+ t.equal(totalPushed, expectTotalData)
+ })
+ r.pipe(w)
+ }
}
+
+module.exports[kReadableStreamSuiteName] = 'stream3-pause-then-read'
diff --git a/test/common/README.md b/test/common/README.md
deleted file mode 100644
index 0e773debfc..0000000000
--- a/test/common/README.md
+++ /dev/null
@@ -1,802 +0,0 @@
-/**/
- require('@babel/polyfill');
- var util = require('util');
- for (var i in util) exports[i] = util[i];
- /**/# Node.js Core Test Common Modules
-
-This directory contains modules used to test the Node.js implementation.
-
-## Table of Contents
-
-* [Benchmark module](#benchmark-module)
-* [Common module API](#common-module-api)
-* [Countdown module](#countdown-module)
-* [DNS module](#dns-module)
-* [Duplex pair helper](#duplex-pair-helper)
-* [Fixtures module](#fixtures-module)
-* [Heap dump checker module](#heap-dump-checker-module)
-* [HTTP2 module](#http2-module)
-* [Internet module](#internet-module)
-* [tick module](#tick-module)
-* [tmpdir module](#tmpdir-module)
-* [WPT module](#wpt-module)
-
-## Benchmark Module
-
-The `benchmark` module is used by tests to run benchmarks.
-
-### runBenchmark(name, args, env)
-
-* `name` [<string>] Name of benchmark suite to be run.
-* `args` [<Array>] Array of environment variable key/value pairs (ex:
- `n=1`) to be applied via `--set`.
-* `env` [<Object>] Environment variables to be applied during the run.
-
-## Common Module API
-
-The `common` module is used by tests for consistency across repeated
-tasks.
-
-### allowGlobals(...whitelist)
-* `whitelist` [<Array>] Array of Globals
-* return [<Array>]
-
-Takes `whitelist` and concats that with predefined `knownGlobals`.
-
-### busyLoop(time)
-* `time` [<number>]
-
-Blocks for `time` amount of time.
-
-### canCreateSymLink()
-* return [<boolean>]
-
-Checks whether the current running process can create symlinks. On Windows, this
-returns `false` if the process running doesn't have privileges to create
-symlinks
-([SeCreateSymbolicLinkPrivilege](https://msdn.microsoft.com/en-us/library/windows/desktop/bb530716(v=vs.85).aspx)).
-On non-Windows platforms, this always returns `true`.
-
-### createZeroFilledFile(filename)
-
-Creates a 10 MB file of all null characters.
-
-### disableCrashOnUnhandledRejection()
-
-Removes the `process.on('unhandledRejection')` handler that crashes the process
-after a tick. The handler is useful for tests that use Promises and need to make
-sure no unexpected rejections occur, because currently they result in silent
-failures. However, it is useful in some rare cases to disable it, for example if
-the `unhandledRejection` hook is directly used by the test.
-
-### enoughTestMem
-* [<boolean>]
-
-Indicates if there is more than 1gb of total memory.
-
-### expectsError([fn, ]settings[, exact])
-* `fn` [<Function>] a function that should throw.
-* `settings` [<Object>]
- that must contain the `code` property plus any of the other following
- properties (some properties only apply for `AssertionError`):
- * `code` [<string>]
- expected error must have this value for its `code` property.
- * `type` [<Function>]
- expected error must be an instance of `type` and must be an Error subclass.
- * `message` [<string>] or [<RegExp>]
- if a string is provided for `message`, expected error must have it for its
- `message` property; if a regular expression is provided for `message`, the
- regular expression must match the `message` property of the expected error.
- * `name` [<string>]
- expected error must have this value for its `name` property.
- * `info` <Object> expected error must have the same `info` property
- that is deeply equal to this value.
- * `generatedMessage` [<string>]
- (`AssertionError` only) expected error must have this value for its
- `generatedMessage` property.
- * `actual` <any>
- (`AssertionError` only) expected error must have this value for its
- `actual` property.
- * `expected` <any>
- (`AssertionError` only) expected error must have this value for its
- `expected` property.
- * `operator` <any>
- (`AssertionError` only) expected error must have this value for its
- `operator` property.
-* `exact` [<number>] default = 1
-* return [<Function>]
-
- If `fn` is provided, it will be passed to `assert.throws` as first argument
- and `undefined` will be returned.
- Otherwise a function suitable as callback or for use as a validation function
- passed as the second argument to `assert.throws()` will be returned. If the
- returned function has not been called exactly `exact` number of times when the
- test is complete, then the test will fail.
-
-### expectWarning(name, expected, code)
-* `name` [<string>]
-* `expected` [<string>] | [<Array>]
-* `code` [<string>]
-
-Tests whether `name`, `expected`, and `code` are part of a raised warning. If
-an expected warning does not have a code then `common.noWarnCode` can be used
-to indicate this.
-
-### getArrayBufferViews(buf)
-* `buf` [<Buffer>]
-* return [<ArrayBufferView[]>]
-
-Returns an instance of all possible `ArrayBufferView`s of the provided Buffer.
-
-### getBufferSources(buf)
-* `buf` [<Buffer>]
-* return [<BufferSource[]>]
-
-Returns an instance of all possible `BufferSource`s of the provided Buffer,
-consisting of all `ArrayBufferView` and an `ArrayBuffer`.
-
-### getCallSite(func)
-* `func` [<Function>]
-* return [<string>]
-
-Returns the file name and line number for the provided Function.
-
-### getTTYfd()
-
-Attempts to get a valid TTY file descriptor. Returns `-1` if it fails.
-
-The TTY file descriptor is assumed to be capable of being writable.
-
-### hasCrypto
-* [<boolean>]
-
-Indicates whether OpenSSL is available.
-
-### hasFipsCrypto
-* [<boolean>]
-
-Indicates `hasCrypto` and `crypto` with fips.
-
-### hasIntl
-* [<boolean>]
-
-Indicates if [internationalization] is supported.
-
-### hasIPv6
-* [<boolean>]
-
-Indicates whether `IPv6` is supported on this platform.
-
-### hasMultiLocalhost
-* [<boolean>]
-
-Indicates if there are multiple localhosts available.
-
-### inFreeBSDJail
-* [<boolean>]
-
-Checks whether free BSD Jail is true or false.
-
-### isAIX
-* [<boolean>]
-
-Platform check for Advanced Interactive eXecutive (AIX).
-
-### isAlive(pid)
-* `pid` [<number>]
-* return [<boolean>]
-
-Attempts to 'kill' `pid`
-
-### isFreeBSD
-* [<boolean>]
-
-Platform check for Free BSD.
-
-### isLinux
-* [<boolean>]
-
-Platform check for Linux.
-
-### isLinuxPPCBE
-* [<boolean>]
-
-Platform check for Linux on PowerPC.
-
-### isOSX
-* [<boolean>]
-
-Platform check for macOS.
-
-### isSunOS
-* [<boolean>]
-
-Platform check for SunOS.
-
-### isWindows
-* [<boolean>]
-
-Platform check for Windows.
-
-### localhostIPv4
-* [<string>]
-
-IP of `localhost`.
-
-### localIPv6Hosts
-* [<Array>]
-
-Array of IPV6 representations for `localhost`.
-
-### mustCall([fn][, exact])
-* `fn` [<Function>] default = () => {}
-* `exact` [<number>] default = 1
-* return [<Function>]
-
-Returns a function that calls `fn`. If the returned function has not been called
-exactly `exact` number of times when the test is complete, then the test will
-fail.
-
-If `fn` is not provided, an empty function will be used.
-
-### mustCallAtLeast([fn][, minimum])
-* `fn` [<Function>] default = () => {}
-* `minimum` [<number>] default = 1
-* return [<Function>]
-
-Returns a function that calls `fn`. If the returned function has not been called
-at least `minimum` number of times when the test is complete, then the test will
-fail.
-
-If `fn` is not provided, an empty function will be used.
-
-### mustNotCall([msg])
-* `msg` [<string>] default = 'function should not have been called'
-* return [<Function>]
-
-Returns a function that triggers an `AssertionError` if it is invoked. `msg` is
-used as the error message for the `AssertionError`.
-
-### nodeProcessAborted(exitCode, signal)
-* `exitCode` [<number>]
-* `signal` [<string>]
-* return [<boolean>]
-
-Returns `true` if the exit code `exitCode` and/or signal name `signal` represent
-the exit code and/or signal name of a node process that aborted, `false`
-otherwise.
-
-### noWarnCode
-See `common.expectWarning()` for usage.
-
-### opensslCli
-* [<boolean>]
-
-Indicates whether 'opensslCli' is supported.
-
-### platformTimeout(ms)
-* `ms` [<number>]
-* return [<number>]
-
-Platform normalizes timeout.
-
-### PIPE
-* [<string>]
-
-Path to the test socket.
-
-### PORT
-* [<number>]
-
-A port number for tests to use if one is needed.
-
-### printSkipMessage(msg)
-* `msg` [<string>]
-
-Logs '1..0 # Skipped: ' + `msg`
-
-### pwdCommand
-* [<array>] First two argument for the `spawn`/`exec` functions.
-
-Platform normalized `pwd` command options. Usage example:
-```js
-const common = require('../common');
-const { spawn } = require('child_process');
-
-spawn(...common.pwdCommand, { stdio: ['pipe'] });
-```
-
-### rootDir
-* [<string>]
-
-Path to the 'root' directory. either `/` or `c:\\` (windows)
-
-### runWithInvalidFD(func)
-* `func` [<Function>]
-
-Runs `func` with an invalid file descriptor that is an unsigned integer and
-can be used to trigger `EBADF` as the first argument. If no such file
-descriptor could be generated, a skip message will be printed and the `func`
-will not be run.
-
-### skip(msg)
-* `msg` [<string>]
-
-Logs '1..0 # Skipped: ' + `msg` and exits with exit code `0`.
-
-### skipIfEslintMissing()
-
-Skip the rest of the tests in the current file when `ESLint` is not available
-at `tools/node_modules/eslint`
-
-### skipIfInspectorDisabled()
-
-Skip the rest of the tests in the current file when the Inspector
-was disabled at compile time.
-
-### skipIf32Bits()
-
-Skip the rest of the tests in the current file when the Node.js executable
-was compiled with a pointer size smaller than 64 bits.
-
-### skipIfWorker()
-
-Skip the rest of the tests in the current file when not running on a main
-thread.
-
-## ArrayStream Module
-
-The `ArrayStream` module provides a simple `Stream` that pushes elements from
-a given array.
-
-
-```js
-const ArrayStream = require('../common/arraystream');
-const stream = new ArrayStream();
-stream.run(['a', 'b', 'c']);
-```
-
-It can be used within tests as a simple mock stream.
-
-## Countdown Module
-
-The `Countdown` module provides a simple countdown mechanism for tests that
-require a particular action to be taken after a given number of completed
-tasks (for instance, shutting down an HTTP server after a specific number of
-requests). The Countdown will fail the test if the remainder did not reach 0.
-
-
-```js
-const Countdown = require('../common/countdown');
-
-function doSomething() {
- console.log('.');
-}
-
-const countdown = new Countdown(2, doSomething);
-countdown.dec();
-countdown.dec();
-```
-
-### new Countdown(limit, callback)
-
-* `limit` {number}
-* `callback` {function}
-
-Creates a new `Countdown` instance.
-
-### Countdown.prototype.dec()
-
-Decrements the `Countdown` counter.
-
-### Countdown.prototype.remaining
-
-Specifies the remaining number of times `Countdown.prototype.dec()` must be
-called before the callback is invoked.
-
-## DNS Module
-
-The `DNS` module provides utilities related to the `dns` built-in module.
-
-### errorLookupMock(code, syscall)
-
-* `code` [<string>] Defaults to `dns.mockedErrorCode`.
-* `syscall` [<string>] Defaults to `dns.mockedSysCall`.
-* return [<Function>]
-
-A mock for the `lookup` option of `net.connect()` that would result in an error
-with the `code` and the `syscall` specified. Returns a function that has the
-same signature as `dns.lookup()`.
-
-### mockedErrorCode
-
-The default `code` of errors generated by `errorLookupMock`.
-
-### mockedSysCall
-
-The default `syscall` of errors generated by `errorLookupMock`.
-
-### readDomainFromPacket(buffer, offset)
-
-* `buffer` [<Buffer>]
-* `offset` [<number>]
-* return [<Object>]
-
-Reads the domain string from a packet and returns an object containing the
-number of bytes read and the domain.
-
-### parseDNSPacket(buffer)
-
-* `buffer` [<Buffer>]
-* return [<Object>]
-
-Parses a DNS packet. Returns an object with the values of the various flags of
-the packet depending on the type of packet.
-
-### writeIPv6(ip)
-
-* `ip` [<string>]
-* return [<Buffer>]
-
-Reads an IPv6 String and returns a Buffer containing the parts.
-
-### writeDomainName(domain)
-
-* `domain` [<string>]
-* return [<Buffer>]
-
-Reads a Domain String and returns a Buffer containing the domain.
-
-### writeDNSPacket(parsed)
-
-* `parsed` [<Object>]
-* return [<Buffer>]
-
-Takes in a parsed Object and writes its fields to a DNS packet as a Buffer
-object.
-
-## Duplex pair helper
-
-The `common/duplexpair` module exports a single function `makeDuplexPair`,
-which returns an object `{ clientSide, serverSide }` where each side is a
-`Duplex` stream connected to the other side.
-
-There is no difference between client or server side beyond their names.
-
-## Fixtures Module
-
-The `common/fixtures` module provides convenience methods for working with
-files in the `test/fixtures` directory.
-
-### fixtures.fixturesDir
-
-* [<string>]
-
-The absolute path to the `test/fixtures/` directory.
-
-### fixtures.path(...args)
-
-* `...args` [<string>]
-
-Returns the result of `path.join(fixtures.fixturesDir, ...args)`.
-
-### fixtures.readSync(args[, enc])
-
-* `args` [<string>] | [<Array>]
-
-Returns the result of
-`fs.readFileSync(path.join(fixtures.fixturesDir, ...args), 'enc')`.
-
-### fixtures.readKey(arg[, enc])
-
-* `arg` [<string>]
-
-Returns the result of
-`fs.readFileSync(path.join(fixtures.fixturesDir, 'keys', arg), 'enc')`.
-
-## Heap dump checker module
-
-This provides utilities for checking the validity of heap dumps.
-This requires the usage of `--expose-internals`.
-
-### heap.recordState()
-
-Create a heap dump and an embedder graph copy for inspection.
-The returned object has a `validateSnapshotNodes` function similar to the
-one listed below. (`heap.validateSnapshotNodes(...)` is a shortcut for
-`heap.recordState().validateSnapshotNodes(...)`.)
-
-### heap.validateSnapshotNodes(name, expected, options)
-
-* `name` [<string>] Look for this string as the name of heap dump nodes.
-* `expected` [<Array>] A list of objects, possibly with an `children`
- property that points to expected other adjacent nodes.
-* `options` [<Array>]
- * `loose` [<boolean>] Do not expect an exact listing of occurrences
- of nodes with name `name` in `expected`.
-
-Create a heap dump and an embedder graph copy and validate occurrences.
-
-
-```js
-validateSnapshotNodes('TLSWRAP', [
- {
- children: [
- { name: 'enc_out' },
- { name: 'enc_in' },
- { name: 'TLSWrap' }
- ]
- }
-]);
-```
-
-## hijackstdio Module
-
-The `hijackstdio` module provides utility functions for temporarily redirecting
-`stdout` and `stderr` output.
-
-
-```js
-const { hijackStdout, restoreStdout } = require('../common/hijackstdio');
-
-hijackStdout((data) => {
- /* Do something with data */
- restoreStdout();
-});
-
-console.log('this is sent to the hijacked listener');
-```
-
-### hijackStderr(listener)
-* `listener` [<Function>]: a listener with a single parameter
- called `data`.
-
-Eavesdrop to `process.stderr.write()` calls. Once `process.stderr.write()` is
-called, `listener` will also be called and the `data` of `write` function will
-be passed to `listener`. What's more, `process.stderr.writeTimes` is a count of
-the number of calls.
-
-### hijackStdout(listener)
-* `listener` [<Function>]: a listener with a single parameter
- called `data`.
-
-Eavesdrop to `process.stdout.write()` calls. Once `process.stdout.write()` is
-called, `listener` will also be called and the `data` of `write` function will
-be passed to `listener`. What's more, `process.stdout.writeTimes` is a count of
-the number of calls.
-
-### restoreStderr()
-
-Restore the original `process.stderr.write()`. Used to restore `stderr` to its
-original state after calling [`hijackstdio.hijackStdErr()`][].
-
-### restoreStdout()
-
-Restore the original `process.stdout.write()`. Used to restore `stdout` to its
-original state after calling [`hijackstdio.hijackStdOut()`][].
-
-
-## HTTP/2 Module
-
-The http2.js module provides a handful of utilities for creating mock HTTP/2
-frames for testing of HTTP/2 endpoints
-
-
-```js
-const http2 = require('../common/http2');
-```
-
-### Class: Frame
-
-The `http2.Frame` is a base class that creates a `Buffer` containing a
-serialized HTTP/2 frame header.
-
-
-```js
-// length is a 24-bit unsigned integer
-// type is an 8-bit unsigned integer identifying the frame type
-// flags is an 8-bit unsigned integer containing the flag bits
-// id is the 32-bit stream identifier, if any.
-const frame = new http2.Frame(length, type, flags, id);
-
-// Write the frame data to a socket
-socket.write(frame.data);
-```
-
-The serialized `Buffer` may be retrieved using the `frame.data` property.
-
-### Class: DataFrame extends Frame
-
-The `http2.DataFrame` is a subclass of `http2.Frame` that serializes a `DATA`
-frame.
-
-
-```js
-// id is the 32-bit stream identifier
-// payload is a Buffer containing the DATA payload
-// padlen is an 8-bit integer giving the number of padding bytes to include
-// final is a boolean indicating whether the End-of-stream flag should be set,
-// defaults to false.
-const frame = new http2.DataFrame(id, payload, padlen, final);
-
-socket.write(frame.data);
-```
-
-### Class: HeadersFrame
-
-The `http2.HeadersFrame` is a subclass of `http2.Frame` that serializes a
-`HEADERS` frame.
-
-
-```js
-// id is the 32-bit stream identifier
-// payload is a Buffer containing the HEADERS payload (see either
-// http2.kFakeRequestHeaders or http2.kFakeResponseHeaders).
-// padlen is an 8-bit integer giving the number of padding bytes to include
-// final is a boolean indicating whether the End-of-stream flag should be set,
-// defaults to false.
-const frame = new http2.HeadersFrame(id, payload, padlen, final);
-
-socket.write(frame.data);
-```
-
-### Class: SettingsFrame
-
-The `http2.SettingsFrame` is a subclass of `http2.Frame` that serializes an
-empty `SETTINGS` frame.
-
-
-```js
-// ack is a boolean indicating whether or not to set the ACK flag.
-const frame = new http2.SettingsFrame(ack);
-
-socket.write(frame.data);
-```
-
-### http2.kFakeRequestHeaders
-
-Set to a `Buffer` instance that contains a minimal set of serialized HTTP/2
-request headers to be used as the payload of a `http2.HeadersFrame`.
-
-
-```js
-const frame = new http2.HeadersFrame(1, http2.kFakeRequestHeaders, 0, true);
-
-socket.write(frame.data);
-```
-
-### http2.kFakeResponseHeaders
-
-Set to a `Buffer` instance that contains a minimal set of serialized HTTP/2
-response headers to be used as the payload a `http2.HeadersFrame`.
-
-
-```js
-const frame = new http2.HeadersFrame(1, http2.kFakeResponseHeaders, 0, true);
-
-socket.write(frame.data);
-```
-
-### http2.kClientMagic
-
-Set to a `Buffer` containing the preamble bytes an HTTP/2 client must send
-upon initial establishment of a connection.
-
-
-```js
-socket.write(http2.kClientMagic);
-```
-
-## Internet Module
-
-The `common/internet` module provides utilities for working with
-internet-related tests.
-
-### internet.addresses
-
-* [<Object>]
- * `INET_HOST` [<string>] A generic host that has registered common
- DNS records, supports both IPv4 and IPv6, and provides basic HTTP/HTTPS
- services
- * `INET4_HOST` [<string>] A host that provides IPv4 services
- * `INET6_HOST` [<string>] A host that provides IPv6 services
- * `INET4_IP` [<string>] An accessible IPv4 IP, defaults to the
- Google Public DNS IPv4 address
- * `INET6_IP` [<string>] An accessible IPv6 IP, defaults to the
- Google Public DNS IPv6 address
- * `INVALID_HOST` [<string>] An invalid host that cannot be resolved
- * `MX_HOST` [<string>] A host with MX records registered
- * `SRV_HOST` [<string>] A host with SRV records registered
- * `PTR_HOST` [<string>] A host with PTR records registered
- * `NAPTR_HOST` [<string>] A host with NAPTR records registered
- * `SOA_HOST` [<string>] A host with SOA records registered
- * `CNAME_HOST` [<string>] A host with CNAME records registered
- * `NS_HOST` [<string>] A host with NS records registered
- * `TXT_HOST` [<string>] A host with TXT records registered
- * `DNS4_SERVER` [<string>] An accessible IPv4 DNS server
- * `DNS6_SERVER` [<string>] An accessible IPv6 DNS server
-
-A set of addresses for internet-related tests. All properties are configurable
-via `NODE_TEST_*` environment variables. For example, to configure
-`internet.addresses.INET_HOST`, set the environment
-variable `NODE_TEST_INET_HOST` to a specified host.
-
-## ongc Module
-
-The `ongc` module allows a garbage collection listener to be installed. The
-module exports a single `onGC()` function.
-
-```js
-require('../common');
-const onGC = require('../common/ongc');
-
-onGC({}, { ongc() { console.log('collected'); } });
-```
-
-### onGC(target, listener)
-* `target` [<Object>]
-* `listener` [<Object>]
- * `ongc` [<Function>]
-
-Installs a GC listener for the collection of `target`.
-
-This uses `async_hooks` for GC tracking. This means that it enables
-`async_hooks` tracking, which may affect the test functionality. It also
-means that between a `global.gc()` call and the listener being invoked
-a full `setImmediate()` invocation passes.
-
-`listener` is an object to make it easier to use a closure; the target object
-should not be in scope when `listener.ongc()` is created.
-
-
-## tick Module
-
-The `tick` module provides a helper function that can be used to call a callback
-after a given number of event loop "ticks".
-
-### tick(x, cb)
-
-* `x` [<number>] Number of event loop "ticks".
-* `cb` [<Function>] A callback function.
-
-## tmpdir Module
-
-The `tmpdir` module supports the use of a temporary directory for testing.
-
-### path
-* [<string>]
-
-The realpath of the testing temporary directory.
-
-### refresh()
-
-Deletes and recreates the testing temporary directory.
-
-## WPT Module
-
-The wpt.js module is a port of parts of
-[W3C testharness.js](https://github.com/w3c/testharness.js) for testing the
-Node.js
-[WHATWG URL API](https://nodejs.org/api/url.html#url_the_whatwg_url_api)
-implementation with tests from
-[W3C Web Platform Tests](https://github.com/w3c/web-platform-tests).
-
-
-[<Array>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array
-[<ArrayBufferView[]>]: https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView
-[<Buffer>]: https://nodejs.org/api/buffer.html#buffer_class_buffer
-[<Function>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function
-[<Object>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object
-[<RegExp>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp
-[<boolean>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type
-[<number>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type
-[<string>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type
-[`hijackstdio.hijackStdErr()`]: #hijackstderrlistener
-[`hijackstdio.hijackStdOut()`]: #hijackstdoutlistener
-[internationalization]: https://github.com/nodejs/node/wiki/Intl
-
-function forEach (xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
diff --git a/test/common/arraystream.js b/test/common/arraystream.js
deleted file mode 100644
index 167f927dff..0000000000
--- a/test/common/arraystream.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var _require = require('../../'),
- Stream = _require.Stream;
-
-function noop() {} // A stream to push an array into a REPL
-
-
-function ArrayStream() {
- this.run = function (data) {
- var _this = this;
-
- forEach(data, function (line) {
- _this.emit('data', "".concat(line, "\n"));
- });
- };
-}
-
-Object.setPrototypeOf(ArrayStream.prototype, Stream.prototype);
-Object.setPrototypeOf(ArrayStream, Stream);
-ArrayStream.prototype.readable = true;
-ArrayStream.prototype.writable = true;
-ArrayStream.prototype.pause = noop;
-ArrayStream.prototype.resume = noop;
-ArrayStream.prototype.write = noop;
-module.exports = ArrayStream;
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/benchmark.js b/test/common/benchmark.js
deleted file mode 100644
index 1b368bac58..0000000000
--- a/test/common/benchmark.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var assert = require('assert');
-
-var fork = require('child_process').fork;
-
-var path = require('path');
-
-var runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js');
-
-function runBenchmark(name, args, env) {
- var argv = [];
-
- for (var _i = 0; _i < args.length; _i++) {
- argv.push('--set');
- argv.push(args[_i]);
- }
-
- argv.push(name);
- var mergedEnv = Object.assign({}, process.env, env);
- var child = fork(runjs, argv, {
- env: mergedEnv,
- stdio: ['inherit', 'pipe', 'inherit', 'ipc']
- });
- child.stdout.setEncoding('utf8');
- var stdout = '';
- child.stdout.on('data', function (line) {
- stdout += line;
- });
- child.on('exit', function (code, signal) {
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null); // This bit makes sure that each benchmark file is being sent settings such
- // that the benchmark file runs just one set of options. This helps keep the
- // benchmark tests from taking a long time to run. Therefore, each benchmark
- // file should result in three lines of output: a blank line, a line with
- // the name of the benchmark file, and a line with the only results that we
- // get from testing the benchmark file.
-
- assert.ok(/^(?:\n.+?\n.+?\n)+$/.test(stdout), "benchmark file not running exactly one configuration in test: ".concat(stdout));
- });
-}
-
-module.exports = runBenchmark;
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/countdown.js b/test/common/countdown.js
deleted file mode 100644
index 39193672b5..0000000000
--- a/test/common/countdown.js
+++ /dev/null
@@ -1,80 +0,0 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var assert = require('assert');
-
-var kLimit = Symbol('limit');
-var kCallback = Symbol('callback');
-
-var common = require('./');
-
-var Countdown =
-/*#__PURE__*/
-function () {
- function Countdown(limit, cb) {
- _classCallCheck(this, Countdown);
-
- assert.strictEqual(typeof limit, 'number');
- assert.strictEqual(typeof cb, 'function');
- this[kLimit] = limit;
- this[kCallback] = common.mustCall(cb);
- }
-
- _createClass(Countdown, [{
- key: "dec",
- value: function dec() {
- assert(this[kLimit] > 0, 'Countdown expired');
- if (--this[kLimit] === 0) this[kCallback]();
- return this[kLimit];
- }
- }, {
- key: "remaining",
- get: function get() {
- return this[kLimit];
- }
- }]);
-
- return Countdown;
-}();
-
-module.exports = Countdown;
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/dns.js b/test/common/dns.js
deleted file mode 100644
index f63b686fd2..0000000000
--- a/test/common/dns.js
+++ /dev/null
@@ -1,436 +0,0 @@
-"use strict";
-
-function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
-
-function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
-
-function _iterableToArrayLimit(arr, i) { if (!(Symbol.iterator in Object(arr) || Object.prototype.toString.call(arr) === "[object Arguments]")) { return; } var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
-
-function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var assert = require('assert');
-
-var os = require('os');
-
-var types = {
- A: 1,
- AAAA: 28,
- NS: 2,
- CNAME: 5,
- SOA: 6,
- PTR: 12,
- MX: 15,
- TXT: 16,
- ANY: 255
-};
-var classes = {
- IN: 1
-}; // Naïve DNS parser/serializer.
-
-function readDomainFromPacket(buffer, offset) {
- assert.ok(offset < buffer.length);
- var length = buffer[offset];
-
- if (length === 0) {
- return {
- nread: 1,
- domain: ''
- };
- } else if ((length & 0xC0) === 0) {
- offset += 1;
- var chunk = buffer.toString('ascii', offset, offset + length); // Read the rest of the domain.
-
- var _readDomainFromPacket = readDomainFromPacket(buffer, offset + length),
- nread = _readDomainFromPacket.nread,
- domain = _readDomainFromPacket.domain;
-
- return {
- nread: 1 + length + nread,
- domain: domain ? "".concat(chunk, ".").concat(domain) : chunk
- };
- } else {
- // Pointer to another part of the packet.
- assert.strictEqual(length & 0xC0, 0xC0); // eslint-disable-next-line space-infix-ops, space-unary-ops
-
- var pointeeOffset = buffer.readUInt16BE(offset) & ~0xC000;
- return {
- nread: 2,
- domain: readDomainFromPacket(buffer, pointeeOffset)
- };
- }
-}
-
-function parseDNSPacket(buffer) {
- assert.ok(buffer.length > 12);
- var parsed = {
- id: buffer.readUInt16BE(0),
- flags: buffer.readUInt16BE(2)
- };
- var counts = [['questions', buffer.readUInt16BE(4)], ['answers', buffer.readUInt16BE(6)], ['authorityAnswers', buffer.readUInt16BE(8)], ['additionalRecords', buffer.readUInt16BE(10)]];
- var offset = 12;
-
- for (var _i = 0, _counts = counts; _i < _counts.length; _i++) {
- var _counts$_i = _slicedToArray(_counts[_i], 2),
- sectionName = _counts$_i[0],
- count = _counts$_i[1];
-
- parsed[sectionName] = [];
-
- for (var _i2 = 0; _i2 < count; ++_i2) {
- var _readDomainFromPacket2 = readDomainFromPacket(buffer, offset),
- nread = _readDomainFromPacket2.nread,
- domain = _readDomainFromPacket2.domain;
-
- offset += nread;
- var type = buffer.readUInt16BE(offset);
- var rr = {
- domain: domain,
- cls: buffer.readUInt16BE(offset + 2)
- };
- offset += 4;
-
- for (var name in types) {
- if (types[name] === type) rr.type = name;
- }
-
- if (sectionName !== 'questions') {
- rr.ttl = buffer.readInt32BE(offset);
- var dataLength = buffer.readUInt16BE(offset);
- offset += 6;
-
- switch (type) {
- case types.A:
- assert.strictEqual(dataLength, 4);
- rr.address = "".concat(buffer[offset + 0], ".").concat(buffer[offset + 1], ".") + "".concat(buffer[offset + 2], ".").concat(buffer[offset + 3]);
- break;
-
- case types.AAAA:
- assert.strictEqual(dataLength, 16);
- rr.address = buffer.toString('hex', offset, offset + 16).replace(/(.{4}(?!$))/g, '$1:');
- break;
-
- case types.TXT:
- {
- var position = offset;
- rr.entries = [];
-
- while (position < offset + dataLength) {
- var txtLength = buffer[offset];
- rr.entries.push(buffer.toString('utf8', position + 1, position + 1 + txtLength));
- position += 1 + txtLength;
- }
-
- assert.strictEqual(position, offset + dataLength);
- break;
- }
-
- case types.MX:
- {
- rr.priority = buffer.readInt16BE(buffer, offset);
- offset += 2;
-
- var _readDomainFromPacket3 = readDomainFromPacket(buffer, offset),
- _nread = _readDomainFromPacket3.nread,
- _domain = _readDomainFromPacket3.domain;
-
- rr.exchange = _domain;
- assert.strictEqual(_nread, dataLength);
- break;
- }
-
- case types.NS:
- case types.CNAME:
- case types.PTR:
- {
- var _readDomainFromPacket4 = readDomainFromPacket(buffer, offset),
- _nread2 = _readDomainFromPacket4.nread,
- _domain2 = _readDomainFromPacket4.domain;
-
- rr.value = _domain2;
- assert.strictEqual(_nread2, dataLength);
- break;
- }
-
- case types.SOA:
- {
- var mname = readDomainFromPacket(buffer, offset);
- var rname = readDomainFromPacket(buffer, offset + mname.nread);
- rr.nsname = mname.domain;
- rr.hostmaster = rname.domain;
- var trailerOffset = offset + mname.nread + rname.nread;
- rr.serial = buffer.readUInt32BE(trailerOffset);
- rr.refresh = buffer.readUInt32BE(trailerOffset + 4);
- rr.retry = buffer.readUInt32BE(trailerOffset + 8);
- rr.expire = buffer.readUInt32BE(trailerOffset + 12);
- rr.minttl = buffer.readUInt32BE(trailerOffset + 16);
- assert.strictEqual(trailerOffset + 20, dataLength);
- break;
- }
-
- default:
- throw new Error("Unknown RR type ".concat(rr.type));
- }
-
- offset += dataLength;
- }
-
- parsed[sectionName].push(rr);
- assert.ok(offset <= buffer.length);
- }
- }
-
- assert.strictEqual(offset, buffer.length);
- return parsed;
-}
-
-function writeIPv6(ip) {
- var parts = ip.replace(/^:|:$/g, '').split(':');
- var buf = Buffer.alloc(16);
- var offset = 0;
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = parts[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var part = _step.value;
-
- if (part === '') {
- offset += 16 - 2 * (parts.length - 1);
- } else {
- buf.writeUInt16BE(parseInt(part, 16), offset);
- offset += 2;
- }
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
-
- return buf;
-}
-
-function writeDomainName(domain) {
- return Buffer.concat(domain.split('.').map(function (label) {
- assert(label.length < 64);
- return Buffer.concat([Buffer.from([label.length]), Buffer.from(label, 'ascii')]);
- }).concat([Buffer.alloc(1)]));
-}
-
-function writeDNSPacket(parsed) {
- var buffers = [];
- var kStandardResponseFlags = 0x8180;
- buffers.push(new Uint16Array([parsed.id, parsed.flags === undefined ? kStandardResponseFlags : parsed.flags, parsed.questions && parsed.questions.length, parsed.answers && parsed.answers.length, parsed.authorityAnswers && parsed.authorityAnswers.length, parsed.additionalRecords && parsed.additionalRecords.length]));
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
- var _iteratorError2 = undefined;
-
- try {
- for (var _iterator2 = parsed.questions[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
- var q = _step2.value;
- assert(types[q.type]);
- buffers.push(writeDomainName(q.domain));
- buffers.push(new Uint16Array([types[q.type], q.cls === undefined ? classes.IN : q.cls]));
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
- }
-
- var _iteratorNormalCompletion3 = true;
- var _didIteratorError3 = false;
- var _iteratorError3 = undefined;
-
- try {
- for (var _iterator3 = [].concat(parsed.answers, parsed.authorityAnswers, parsed.additionalRecords)[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
- var rr = _step3.value;
- if (!rr) continue;
- assert(types[rr.type]);
- buffers.push(writeDomainName(rr.domain));
- buffers.push(new Uint16Array([types[rr.type], rr.cls === undefined ? classes.IN : rr.cls]));
- buffers.push(new Int32Array([rr.ttl]));
- var rdLengthBuf = new Uint16Array(1);
- buffers.push(rdLengthBuf);
-
- switch (rr.type) {
- case 'A':
- rdLengthBuf[0] = 4;
- buffers.push(new Uint8Array(rr.address.split('.')));
- break;
-
- case 'AAAA':
- rdLengthBuf[0] = 16;
- buffers.push(writeIPv6(rr.address));
- break;
-
- case 'TXT':
- var total = rr.entries.map(function (s) {
- return s.length;
- }).reduce(function (a, b) {
- return a + b;
- }); // Total length of all strings + 1 byte each for their lengths.
-
- rdLengthBuf[0] = rr.entries.length + total;
- var _iteratorNormalCompletion4 = true;
- var _didIteratorError4 = false;
- var _iteratorError4 = undefined;
-
- try {
- for (var _iterator4 = rr.entries[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
- var txt = _step4.value;
- buffers.push(new Uint8Array([Buffer.byteLength(txt)]));
- buffers.push(Buffer.from(txt));
- }
- } catch (err) {
- _didIteratorError4 = true;
- _iteratorError4 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion4 && _iterator4.return != null) {
- _iterator4.return();
- }
- } finally {
- if (_didIteratorError4) {
- throw _iteratorError4;
- }
- }
- }
-
- break;
-
- case 'MX':
- rdLengthBuf[0] = 2;
- buffers.push(new Uint16Array([rr.priority]));
- // fall through
-
- case 'NS':
- case 'CNAME':
- case 'PTR':
- {
- var domain = writeDomainName(rr.exchange || rr.value);
- rdLengthBuf[0] += domain.length;
- buffers.push(domain);
- break;
- }
-
- case 'SOA':
- {
- var mname = writeDomainName(rr.nsname);
- var rname = writeDomainName(rr.hostmaster);
- rdLengthBuf[0] = mname.length + rname.length + 20;
- buffers.push(mname, rname);
- buffers.push(new Uint32Array([rr.serial, rr.refresh, rr.retry, rr.expire, rr.minttl]));
- break;
- }
-
- default:
- throw new Error("Unknown RR type ".concat(rr.type));
- }
- }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion3 && _iterator3.return != null) {
- _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
- }
- }
- }
-
- return Buffer.concat(buffers.map(function (typedArray) {
- var buf = Buffer.from(typedArray.buffer, typedArray.byteOffset, typedArray.byteLength);
-
- if (os.endianness() === 'LE') {
- if (typedArray.BYTES_PER_ELEMENT === 2) buf.swap16();
- if (typedArray.BYTES_PER_ELEMENT === 4) buf.swap32();
- }
-
- return buf;
- }));
-}
-
-var mockedErrorCode = 'ENOTFOUND';
-var mockedSysCall = 'getaddrinfo';
-
-function errorLookupMock() {
- var code = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : mockedErrorCode;
- var syscall = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : mockedSysCall;
- return function lookupWithError(hostname, dnsopts, cb) {
- var err = new Error("".concat(syscall, " ").concat(code, " ").concat(hostname));
- err.code = code;
- err.errno = code;
- err.syscall = syscall;
- err.hostname = hostname;
- cb(err);
- };
-}
-
-module.exports = {
- types: types,
- classes: classes,
- writeDNSPacket: writeDNSPacket,
- parseDNSPacket: parseDNSPacket,
- errorLookupMock: errorLookupMock,
- mockedErrorCode: mockedErrorCode,
- mockedSysCall: mockedSysCall
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/duplexpair.js b/test/common/duplexpair.js
deleted file mode 100644
index d4277740aa..0000000000
--- a/test/common/duplexpair.js
+++ /dev/null
@@ -1,118 +0,0 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var _require = require('../../'),
- Duplex = _require.Duplex;
-
-var assert = require('assert');
-
-var kCallback = Symbol('Callback');
-var kOtherSide = Symbol('Other');
-
-var DuplexSocket =
-/*#__PURE__*/
-function (_Duplex) {
- _inherits(DuplexSocket, _Duplex);
-
- function DuplexSocket() {
- var _this;
-
- _classCallCheck(this, DuplexSocket);
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(DuplexSocket).call(this));
- _this[kCallback] = null;
- _this[kOtherSide] = null;
- return _this;
- }
-
- _createClass(DuplexSocket, [{
- key: "_read",
- value: function _read() {
- var callback = this[kCallback];
-
- if (callback) {
- this[kCallback] = null;
- callback();
- }
- }
- }, {
- key: "_write",
- value: function _write(chunk, encoding, callback) {
- assert.notStrictEqual(this[kOtherSide], null);
- assert.strictEqual(this[kOtherSide][kCallback], null);
- this[kOtherSide][kCallback] = callback;
- this[kOtherSide].push(chunk);
- }
- }, {
- key: "_final",
- value: function _final(callback) {
- this[kOtherSide].on('end', callback);
- this[kOtherSide].push(null);
- }
- }]);
-
- return DuplexSocket;
-}(Duplex);
-
-function makeDuplexPair() {
- var clientSide = new DuplexSocket();
- var serverSide = new DuplexSocket();
- clientSide[kOtherSide] = serverSide;
- serverSide[kOtherSide] = clientSide;
- return {
- clientSide: clientSide,
- serverSide: serverSide
- };
-}
-
-module.exports = makeDuplexPair;
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/fixtures.js b/test/common/fixtures.js
index 1ddc225e71..d6fcd811b8 100644
--- a/test/common/fixtures.js
+++ b/test/common/fixtures.js
@@ -1,74 +1,39 @@
-"use strict";
+'use strict'
-function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
+const path = require('path')
-function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
+const fs = require('fs')
-function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
+const { pathToFileURL } = require('url')
-function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
+const fixturesDir = path.join(__dirname, '..', 'fixtures')
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
+function fixturesPath(...args) {
+ return path.join(fixturesDir, ...args)
}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-var path = require('path');
-
-var fs = require('fs');
-
-var fixturesDir = path.join(__dirname, '..', 'fixtures');
-
-function fixturesPath() {
- for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
- args[_key] = arguments[_key];
- }
-
- return path.join.apply(path, [fixturesDir].concat(args));
+function fixturesFileURL(...args) {
+ return pathToFileURL(fixturesPath(...args))
}
function readFixtureSync(args, enc) {
- if (Array.isArray(args)) return fs.readFileSync(fixturesPath.apply(void 0, _toConsumableArray(args)), enc);
- return fs.readFileSync(fixturesPath(args), enc);
+ if (Array.isArray(args)) return fs.readFileSync(fixturesPath(...args), enc)
+ return fs.readFileSync(fixturesPath(args), enc)
}
function readFixtureKey(name, enc) {
- return fs.readFileSync(fixturesPath('keys', name), enc);
+ return fs.readFileSync(fixturesPath('keys', name), enc)
+}
+
+function readFixtureKeys(enc, ...names) {
+ return names.map((name) => readFixtureKey(name, enc))
}
module.exports = {
- fixturesDir: fixturesDir,
+ fixturesDir,
path: fixturesPath,
+ fileURL: fixturesFileURL,
readSync: readFixtureSync,
- readKey: readFixtureKey
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
+ readKey: readFixtureKey,
+ readKeys: readFixtureKeys
+}
diff --git a/test/common/fixtures.mjs b/test/common/fixtures.mjs
new file mode 100644
index 0000000000..d6f7f6c092
--- /dev/null
+++ b/test/common/fixtures.mjs
@@ -0,0 +1,17 @@
+import fixtures from './fixtures.js';
+
+const {
+ fixturesDir,
+ path,
+ fileURL,
+ readSync,
+ readKey,
+} = fixtures;
+
+export {
+ fixturesDir,
+ path,
+ fileURL,
+ readSync,
+ readKey,
+};
diff --git a/test/common/heap.js b/test/common/heap.js
deleted file mode 100644
index 0675fdf453..0000000000
--- a/test/common/heap.js
+++ /dev/null
@@ -1,301 +0,0 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var assert = require('assert');
-/**/
-
-
-var util = require('core-util-is');
-
-util.inherits = require('inherits');
-/**/
-
-var internalTestHeap;
-
-try {
- internalTestHeap = require('internal/test/heap');
-} catch (e) {
- console.log('using `test/common/heap.js` requires `--expose-internals`');
- throw e;
-}
-
-var _internalTestHeap = internalTestHeap,
- createJSHeapDump = _internalTestHeap.createJSHeapDump,
- buildEmbedderGraph = _internalTestHeap.buildEmbedderGraph;
-
-function inspectNode(snapshot) {
- return util.inspect(snapshot, {
- depth: 4
- });
-}
-
-function isEdge(edge, _ref) {
- var node_name = _ref.node_name,
- edge_name = _ref.edge_name;
-
- // For ABI compatibility, we did not backport the virtual function
- // AddEdge() with a name as last argument back to v10.x, so edge_name.
- // is ignored.
- // if (edge.name !== edge_name) {
- // return false;
- // }
- // From our internal embedded graph
- if (edge.to.value) {
- if (edge.to.value.constructor.name !== node_name) {
- return false;
- }
- } else if (edge.to.name !== node_name) {
- return false;
- }
-
- return true;
-}
-
-var State =
-/*#__PURE__*/
-function () {
- function State() {
- _classCallCheck(this, State);
-
- this.snapshot = createJSHeapDump();
- this.embedderGraph = buildEmbedderGraph();
- } // Validate the v8 heap snapshot
-
-
- _createClass(State, [{
- key: "validateSnapshot",
- value: function validateSnapshot(rootName, expected) {
- var _ref2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
- _ref2$loose = _ref2.loose,
- loose = _ref2$loose === void 0 ? false : _ref2$loose;
-
- var rootNodes = this.snapshot.filter(function (node) {
- return node.name === rootName && node.type !== 'string';
- });
-
- if (loose) {
- assert(rootNodes.length >= expected.length, "Expect to find at least ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length));
- } else {
- assert.strictEqual(rootNodes.length, expected.length, "Expect to find ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length));
- }
-
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = expected[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var expectation = _step.value;
-
- if (expectation.children) {
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
- var _iteratorError2 = undefined;
-
- try {
- var _loop = function _loop() {
- var expectedEdge = _step2.value;
- var check = typeof expectedEdge === 'function' ? expectedEdge : function (edge) {
- return isEdge(edge, expectedEdge);
- };
- var hasChild = rootNodes.some(function (node) {
- return node.outgoingEdges.some(check);
- }); // Don't use assert with a custom message here. Otherwise the
- // inspection in the message is done eagerly and wastes a lot of CPU
- // time.
-
- if (!hasChild) {
- throw new Error('expected to find child ' + "".concat(util.inspect(expectedEdge), " in ").concat(inspectNode(rootNodes)));
- }
- };
-
- for (var _iterator2 = expectation.children[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
- _loop();
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
- }
- }
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
- } // Validate our internal embedded graph representation
-
- }, {
- key: "validateGraph",
- value: function validateGraph(rootName, expected) {
- var _ref3 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
- _ref3$loose = _ref3.loose,
- loose = _ref3$loose === void 0 ? false : _ref3$loose;
-
- var rootNodes = this.embedderGraph.filter(function (node) {
- return node.name === rootName;
- });
-
- if (loose) {
- assert(rootNodes.length >= expected.length, "Expect to find at least ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length));
- } else {
- assert.strictEqual(rootNodes.length, expected.length, "Expect to find ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length));
- }
-
- var _iteratorNormalCompletion3 = true;
- var _didIteratorError3 = false;
- var _iteratorError3 = undefined;
-
- try {
- for (var _iterator3 = expected[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
- var expectation = _step3.value;
-
- if (expectation.children) {
- var _iteratorNormalCompletion4 = true;
- var _didIteratorError4 = false;
- var _iteratorError4 = undefined;
-
- try {
- var _loop2 = function _loop2() {
- var expectedEdge = _step4.value;
- var check = typeof expectedEdge === 'function' ? expectedEdge : function (edge) {
- return isEdge(edge, expectedEdge);
- }; // Don't use assert with a custom message here. Otherwise the
- // inspection in the message is done eagerly and wastes a lot of CPU
- // time.
-
- var hasChild = rootNodes.some(function (node) {
- return node.edges.some(check);
- });
-
- if (!hasChild) {
- throw new Error('expected to find child ' + "".concat(util.inspect(expectedEdge), " in ").concat(inspectNode(rootNodes)));
- }
- };
-
- for (var _iterator4 = expectation.children[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
- _loop2();
- }
- } catch (err) {
- _didIteratorError4 = true;
- _iteratorError4 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion4 && _iterator4.return != null) {
- _iterator4.return();
- }
- } finally {
- if (_didIteratorError4) {
- throw _iteratorError4;
- }
- }
- }
- }
- }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion3 && _iterator3.return != null) {
- _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
- }
- }
- }
- }
- }, {
- key: "validateSnapshotNodes",
- value: function validateSnapshotNodes(rootName, expected) {
- var _ref4 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
- _ref4$loose = _ref4.loose,
- loose = _ref4$loose === void 0 ? false : _ref4$loose;
-
- this.validateSnapshot(rootName, expected, {
- loose: loose
- });
- this.validateGraph(rootName, expected, {
- loose: loose
- });
- }
- }]);
-
- return State;
-}();
-
-function recordState() {
- return new State();
-}
-
-function validateSnapshotNodes() {
- var _recordState;
-
- return (_recordState = recordState()).validateSnapshotNodes.apply(_recordState, arguments);
-}
-
-module.exports = {
- recordState: recordState,
- validateSnapshotNodes: validateSnapshotNodes
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/hijackstdio.js b/test/common/hijackstdio.js
deleted file mode 100644
index b3003f468a..0000000000
--- a/test/common/hijackstdio.js
+++ /dev/null
@@ -1,73 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-// Hijack stdout and stderr
-
-
-var stdWrite = {};
-
-function hijackStdWritable(name, listener) {
- var stream = process[name];
-
- var _write = stdWrite[name] = stream.write;
-
- stream.writeTimes = 0;
-
- stream.write = function (data, callback) {
- try {
- listener(data);
- } catch (e) {
- process.nextTick(function () {
- throw e;
- });
- }
-
- _write.call(stream, data, callback);
-
- stream.writeTimes++;
- };
-}
-
-function restoreWritable(name) {
- process[name].write = stdWrite[name];
- delete process[name].writeTimes;
-}
-
-module.exports = {
- hijackStdout: hijackStdWritable.bind(null, 'stdout'),
- hijackStderr: hijackStdWritable.bind(null, 'stderr'),
- restoreStdout: restoreWritable.bind(null, 'stdout'),
- restoreStderr: restoreWritable.bind(null, 'stderr')
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/http2.js b/test/common/http2.js
deleted file mode 100644
index 73ea152027..0000000000
--- a/test/common/http2.js
+++ /dev/null
@@ -1,259 +0,0 @@
-"use strict";
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-// An HTTP/2 testing tool used to create mock frames for direct testing
-// of HTTP/2 endpoints.
-
-
-var kFrameData = Symbol('frame-data');
-var FLAG_EOS = 0x1;
-var FLAG_ACK = 0x1;
-var FLAG_EOH = 0x4;
-var FLAG_PADDED = 0x8;
-var PADDING = Buffer.alloc(255);
-var kClientMagic = Buffer.from('505249202a20485454502f322' + 'e300d0a0d0a534d0d0a0d0a', 'hex');
-var kFakeRequestHeaders = Buffer.from('828684410f7777772e65' + '78616d706c652e636f6d', 'hex');
-var kFakeResponseHeaders = Buffer.from('4803333032580770726976617465611d' + '4d6f6e2c203231204f63742032303133' + '2032303a31333a323120474d546e1768' + '747470733a2f2f7777772e6578616d70' + '6c652e636f6d', 'hex');
-
-function isUint32(val) {
- return val >>> 0 === val;
-}
-
-function isUint24(val) {
- return val >>> 0 === val && val <= 0xFFFFFF;
-}
-
-function isUint8(val) {
- return val >>> 0 === val && val <= 0xFF;
-}
-
-function write32BE(array, pos, val) {
- if (!isUint32(val)) throw new RangeError('val is not a 32-bit number');
- array[pos++] = val >> 24 & 0xff;
- array[pos++] = val >> 16 & 0xff;
- array[pos++] = val >> 8 & 0xff;
- array[pos++] = val & 0xff;
-}
-
-function write24BE(array, pos, val) {
- if (!isUint24(val)) throw new RangeError('val is not a 24-bit number');
- array[pos++] = val >> 16 & 0xff;
- array[pos++] = val >> 8 & 0xff;
- array[pos++] = val & 0xff;
-}
-
-function write8(array, pos, val) {
- if (!isUint8(val)) throw new RangeError('val is not an 8-bit number');
- array[pos] = val;
-}
-
-var Frame =
-/*#__PURE__*/
-function () {
- function Frame(length, type, flags, id) {
- _classCallCheck(this, Frame);
-
- this[kFrameData] = Buffer.alloc(9);
- write24BE(this[kFrameData], 0, length);
- write8(this[kFrameData], 3, type);
- write8(this[kFrameData], 4, flags);
- write32BE(this[kFrameData], 5, id);
- }
-
- _createClass(Frame, [{
- key: "data",
- get: function get() {
- return this[kFrameData];
- }
- }]);
-
- return Frame;
-}();
-
-var SettingsFrame =
-/*#__PURE__*/
-function (_Frame) {
- _inherits(SettingsFrame, _Frame);
-
- function SettingsFrame() {
- var ack = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
-
- _classCallCheck(this, SettingsFrame);
-
- var flags = 0;
- if (ack) flags |= FLAG_ACK;
- return _possibleConstructorReturn(this, _getPrototypeOf(SettingsFrame).call(this, 0, 4, flags, 0));
- }
-
- return SettingsFrame;
-}(Frame);
-
-var DataFrame =
-/*#__PURE__*/
-function (_Frame2) {
- _inherits(DataFrame, _Frame2);
-
- function DataFrame(id, payload) {
- var _this;
-
- var padlen = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
- var final = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
-
- _classCallCheck(this, DataFrame);
-
- var len = payload.length;
- var flags = 0;
- if (final) flags |= FLAG_EOS;
- var buffers = [payload];
-
- if (padlen > 0) {
- buffers.unshift(Buffer.from([padlen]));
- buffers.push(PADDING.slice(0, padlen));
- len += padlen + 1;
- flags |= FLAG_PADDED;
- }
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(DataFrame).call(this, len, 0, flags, id));
- buffers.unshift(_this[kFrameData]);
- _this[kFrameData] = Buffer.concat(buffers);
- return _this;
- }
-
- return DataFrame;
-}(Frame);
-
-var HeadersFrame =
-/*#__PURE__*/
-function (_Frame3) {
- _inherits(HeadersFrame, _Frame3);
-
- function HeadersFrame(id, payload) {
- var _this2;
-
- var padlen = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
- var final = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
-
- _classCallCheck(this, HeadersFrame);
-
- var len = payload.length;
- var flags = FLAG_EOH;
- if (final) flags |= FLAG_EOS;
- var buffers = [payload];
-
- if (padlen > 0) {
- buffers.unshift(Buffer.from([padlen]));
- buffers.push(PADDING.slice(0, padlen));
- len += padlen + 1;
- flags |= FLAG_PADDED;
- }
-
- _this2 = _possibleConstructorReturn(this, _getPrototypeOf(HeadersFrame).call(this, len, 1, flags, id));
- buffers.unshift(_this2[kFrameData]);
- _this2[kFrameData] = Buffer.concat(buffers);
- return _this2;
- }
-
- return HeadersFrame;
-}(Frame);
-
-var PingFrame =
-/*#__PURE__*/
-function (_Frame4) {
- _inherits(PingFrame, _Frame4);
-
- function PingFrame() {
- var _this3;
-
- var ack = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
-
- _classCallCheck(this, PingFrame);
-
- var buffers = [Buffer.alloc(8)];
- _this3 = _possibleConstructorReturn(this, _getPrototypeOf(PingFrame).call(this, 8, 6, ack ? 1 : 0, 0));
- buffers.unshift(_this3[kFrameData]);
- _this3[kFrameData] = Buffer.concat(buffers);
- return _this3;
- }
-
- return PingFrame;
-}(Frame);
-
-var AltSvcFrame =
-/*#__PURE__*/
-function (_Frame5) {
- _inherits(AltSvcFrame, _Frame5);
-
- function AltSvcFrame(size) {
- var _this4;
-
- _classCallCheck(this, AltSvcFrame);
-
- var buffers = [Buffer.alloc(size)];
- _this4 = _possibleConstructorReturn(this, _getPrototypeOf(AltSvcFrame).call(this, size, 10, 0, 0));
- buffers.unshift(_this4[kFrameData]);
- _this4[kFrameData] = Buffer.concat(buffers);
- return _this4;
- }
-
- return AltSvcFrame;
-}(Frame);
-
-module.exports = {
- Frame: Frame,
- AltSvcFrame: AltSvcFrame,
- DataFrame: DataFrame,
- HeadersFrame: HeadersFrame,
- SettingsFrame: SettingsFrame,
- PingFrame: PingFrame,
- kFakeRequestHeaders: kFakeRequestHeaders,
- kFakeResponseHeaders: kFakeResponseHeaders,
- kClientMagic: kClientMagic
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/index.js b/test/common/index.js
index 8c7f9d030a..1c30eab135 100644
--- a/test/common/index.js
+++ b/test/common/index.js
@@ -1,26 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
-
-function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
-
-function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
-
-function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -42,408 +19,475 @@ for (var i in util) {
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
-/* eslint-disable node-core/required-modules, node-core/crypto-check */
-
-
-'use strict';
-/**/
+/* eslint-disable node-core/crypto-check */
+'use strict'
+const process = global.process // Some tests tamper with the process global.
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
+const assert = require('assert')
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var process = global.process; // Some tests tamper with the process global.
-
-var path = require('path');
-
-var fs = require('fs');
+const { exec, execSync, spawnSync } = require('child_process')
-var assert = require('assert');
+const fs = require('fs') // Do not require 'os' until needed so that test-os-checked-function can
+// monkey patch it. If 'os' is required here, that test will fail.
-var os = require('os');
+const path = require('path')
-var _require = require('child_process'),
- exec = _require.exec,
- execSync = _require.execSync,
- spawnSync = _require.spawnSync;
-/**/
+const util = require('util')
+const { isMainThread } = require('worker_threads')
-var util = require('core-util-is');
+const tmpdir = require('./tmpdir')
-util.inherits = require('inherits');
-/**/
+const bits = ['arm64', 'mips', 'mipsel', 'ppc64', 'riscv64', 's390x', 'x64'].includes(process.arch) ? 64 : 32
+const hasIntl = !!process.config.variables.v8_enable_i18n_support
-var Timer = {
- now: function now() {}
-};
-
-var tmpdir = require('./tmpdir');
-
-var _process$binding = process.binding('config'),
- bits = _process$binding.bits,
- hasIntl = _process$binding.hasIntl;
-
-var noop = function noop() {};
-
-var hasCrypto = true;
+const { atob, btoa } = require('buffer') // Some tests assume a umask of 0o022 so set that up front. Tests that need a
+// different umask will set it themselves.
+//
+// Workers can read, but not set the umask, so check that this is the main
+// thread.
-var isMainThread = function () {
- if (false) {
- return require('worker_threads').isMainThread;
- } // Worker module not enabled → only a single main thread exists.
+if (isMainThread) process.umask(0o022)
+const noop = () => {}
- return true;
-}(); // Check for flags. Skip this for workers (both, the `cluster` module and
+const hasCrypto = Boolean(process.versions.openssl) && !process.env.NODE_SKIP_CRYPTO
+const hasOpenSSL3 = hasCrypto && require('crypto').constants.OPENSSL_VERSION_NUMBER >= 805306368
+const hasQuic = hasCrypto && !!process.config.variables.openssl_quic // Check for flags. Skip this for workers (both, the `cluster` module and
// `worker_threads`) and child processes.
-
-
-if (false && isMainThread && module.parent && require('cluster').isMaster) {
+// If the binary was built without-ssl then the crypto flags are
+// invalid (bad option). The test itself should handle this case.
+
+if (
+ process.argv.length === 2 &&
+ !process.env.NODE_SKIP_FLAG_CHECK &&
+ isMainThread &&
+ hasCrypto &&
+ require('cluster').isPrimary &&
+ fs.existsSync(process.argv[1])
+) {
// The copyright notice is relatively big and the flags could come afterwards.
- var bytesToRead = 1500;
- var buffer = Buffer.allocUnsafe(bytesToRead);
- var fd = fs.openSync(module.parent.filename, 'r');
- var bytesRead = fs.readSync(fd, buffer, 0, bytesToRead);
- fs.closeSync(fd);
- var source = buffer.toString('utf8', 0, bytesRead);
- var flagStart = source.indexOf('// Flags: --') + 10;
+ const bytesToRead = 1500
+ const buffer = Buffer.allocUnsafe(bytesToRead)
+ const fd = fs.openSync(process.argv[1], 'r')
+ const bytesRead = fs.readSync(fd, buffer, 0, bytesToRead)
+ fs.closeSync(fd)
+ const source = buffer.toString('utf8', 0, bytesRead)
+ const flagStart = source.indexOf('// Flags: --') + 10
if (flagStart !== 9) {
- var flagEnd = source.indexOf('\n', flagStart); // Normalize different EOL.
+ let flagEnd = source.indexOf('\n', flagStart) // Normalize different EOL.
if (source[flagEnd - 1] === '\r') {
- flagEnd--;
+ flagEnd--
}
- var flags = source.substring(flagStart, flagEnd).replace(/_/g, '-').split(' ');
- var args = process.execArgv.map(function (arg) {
- return arg.replace(/_/g, '-');
- });
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = flags[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var flag = _step.value;
+ const flags = source.substring(flagStart, flagEnd).replace(/_/g, '-').split(' ')
+ const args = process.execArgv.map((arg) => arg.replace(/_/g, '-'))
- if (!args.includes(flag) && // If the binary was built without-ssl then the crypto flags are
- // invalid (bad option). The test itself should handle this case.
- hasCrypto && ( // If the binary is build without `intl` the inspect option is
+ for (const flag of flags) {
+ if (
+ !args.includes(flag) && // If the binary is build without `intl` the inspect option is
// invalid. The test itself should handle this case.
- process.config.variables.v8_enable_inspector !== 0 || !flag.startsWith('--inspect'))) {
- throw new Error("Test has to be started with the flag: '".concat(flag, "'"));
+ (process.features.inspector || !flag.startsWith('--inspect'))
+ ) {
+ console.log(
+ 'NOTE: The test started as a child_process using these flags:',
+ util.inspect(flags),
+ 'Use NODE_SKIP_FLAG_CHECK to run the test with the original flags.'
+ )
+ const args = [...flags, ...process.execArgv, ...process.argv.slice(1)]
+ const options = {
+ encoding: 'utf8',
+ stdio: 'inherit'
}
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
+ const result = spawnSync(process.execPath, args, options)
+
+ if (result.signal) {
+ process.kill(0, result.signal)
+ } else {
+ process.exit(result.status)
}
}
}
}
}
-var isWindows = process.platform === 'win32';
-var isAIX = process.platform === 'aix';
-var isLinuxPPCBE = process.platform === 'linux' && process.arch === 'ppc64' && os.endianness() === 'BE';
-var isSunOS = process.platform === 'sunos';
-var isFreeBSD = process.platform === 'freebsd';
-var isOpenBSD = process.platform === 'openbsd';
-var isLinux = process.platform === 'linux';
-var isOSX = process.platform === 'darwin';
-var enoughTestMem = os.totalmem() > 0x70000000;
-/* 1.75 Gb */
-
-var cpus = os.cpus().length === 0 ? [{
- speed: 1000
-}] : os.cpus();
-var enoughTestCpu = Array.isArray(cpus) && (cpus.length > 1 || cpus[0].speed > 999);
-var rootDir = isWindows ? 'c:\\' : '/';
-var buildType = 'readable-stream'; // If env var is set then enable async_hook hooks for all tests.
+const isWindows = process.platform === 'win32'
+const isAIX = process.platform === 'aix'
+const isSunOS = process.platform === 'sunos'
+const isFreeBSD = process.platform === 'freebsd'
+const isOpenBSD = process.platform === 'openbsd'
+const isLinux = process.platform === 'linux'
+const isOSX = process.platform === 'darwin'
+
+const isPi = (() => {
+ try {
+ var _$exec
+
+ // Normal Raspberry Pi detection is to find the `Raspberry Pi` string in
+ // the contents of `/sys/firmware/devicetree/base/model` but that doesn't
+ // work inside a container. Match the chipset model number instead.
+ const cpuinfo = fs.readFileSync('/proc/cpuinfo', {
+ encoding: 'utf8'
+ })
+ return (
+ ((_$exec = /^Hardware\s*:\s*(.*)$/im.exec(cpuinfo)) === null || _$exec === undefined ? undefined : _$exec[1]) ===
+ 'BCM2835'
+ )
+ } catch {
+ return false
+ }
+})()
+
+const isDumbTerminal = process.env.TERM === 'dumb'
+const buildType = process.config.target_defaults ? process.config.target_defaults.default_configuration : 'Release' // If env var is set then enable async_hook hooks for all tests.
if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) {
- var destroydIdsList = {};
- var destroyListList = {};
- var initHandles = {};
- var async_wrap = process.binding('async_wrap');
- process.on('exit', function () {
- // iterate through handles to make sure nothing crashes
- for (var k in initHandles) {
- util.inspect(initHandles[k]);
- }
- });
- var _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId;
+ const destroydIdsList = {}
+ const destroyListList = {}
+ const initHandles = {}
+
+ const { internalBinding } = require('internal/test/binding')
+
+ const async_wrap = internalBinding('async_wrap')
+ process.on('exit', () => {
+ // Iterate through handles to make sure nothing crashes
+ for (const k in initHandles) util.inspect(initHandles[k])
+ })
+ const _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId
async_wrap.queueDestroyAsyncId = function queueDestroyAsyncId(id) {
if (destroyListList[id] !== undefined) {
- process._rawDebug(destroyListList[id]);
+ process._rawDebug(destroyListList[id])
- process._rawDebug();
+ process._rawDebug()
- throw new Error("same id added to destroy list twice (".concat(id, ")"));
+ throw new Error(`same id added to destroy list twice (${id})`)
}
- destroyListList[id] = new Error().stack;
-
- _queueDestroyAsyncId(id);
- };
- /*require('async_hooks').createHook({
- init(id, ty, tr, r) {
- if (initHandles[id]) {
- process._rawDebug(
- `Is same resource: ${r === initHandles[id].resource}`);
- process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`);
- throw new Error(`init called twice for same id (${id})`);
- }
- initHandles[id] = { resource: r, stack: new Error().stack.substr(6) };
- },
- before() { },
- after() { },
- destroy(id) {
- if (destroydIdsList[id] !== undefined) {
- process._rawDebug(destroydIdsList[id]);
- process._rawDebug();
- throw new Error(`destroy called for same id (${id})`);
- }
- destroydIdsList[id] = new Error().stack;
- },
- }).enable();*/
-
-}
-
-var opensslCli = null;
-var inFreeBSDJail = null;
-var localhostIPv4 = null;
-var localIPv6Hosts = isLinux ? [// Debian/Ubuntu
-'ip6-localhost', 'ip6-loopback', // SUSE
-'ipv6-localhost', 'ipv6-loopback', // Typically universal
-'localhost'] : ['localhost'];
-
-var PIPE = function () {
- var localRelative = path.relative(process.cwd(), "".concat(tmpdir.path, "/"));
- var pipePrefix = isWindows ? '\\\\.\\pipe\\' : localRelative;
- var pipeName = "node-test.".concat(process.pid, ".sock");
- return path.join(pipePrefix, pipeName);
-}();
-
-var hasIPv6 = function () {
- var iFaces = os.networkInterfaces();
- var re = isWindows ? /Loopback Pseudo-Interface/ : /lo/;
- return objectKeys(iFaces).some(function (name) {
- return re.test(name) && iFaces[name].some(function (_ref) {
- var family = _ref.family;
- return family === 'IPv6';
- });
- });
-}();
-/*
- * Check that when running a test with
- * `$node --abort-on-uncaught-exception $file child`
- * the process aborts.
- */
+ destroyListList[id] = util.inspect(new Error())
+ _queueDestroyAsyncId(id)
+ }
+
+ require('async_hooks')
+ .createHook({
+ init(id, ty, tr, resource) {
+ if (initHandles[id]) {
+ process._rawDebug(`Is same resource: ${resource === initHandles[id].resource}`)
+
+ process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`)
+
+ throw new Error(`init called twice for same id (${id})`)
+ }
+
+ initHandles[id] = {
+ resource,
+ stack: util.inspect(new Error()).substr(6)
+ }
+ },
+
+ before() {},
+
+ after() {},
+
+ destroy(id) {
+ if (destroydIdsList[id] !== undefined) {
+ process._rawDebug(destroydIdsList[id])
+
+ process._rawDebug()
+
+ throw new Error(`destroy called for same id (${id})`)
+ }
+
+ destroydIdsList[id] = util.inspect(new Error())
+ }
+ })
+ .enable()
+}
+
+let opensslCli = null
+let inFreeBSDJail = null
+let localhostIPv4 = null
+const localIPv6Hosts = isLinux
+ ? [
+ // Debian/Ubuntu
+ 'ip6-localhost',
+ 'ip6-loopback', // SUSE
+ 'ipv6-localhost',
+ 'ipv6-loopback', // Typically universal
+ 'localhost'
+ ]
+ : ['localhost']
+
+const PIPE = (() => {
+ const localRelative = path.relative(process.cwd(), `${tmpdir.path}/`)
+ const pipePrefix = isWindows ? '\\\\.\\pipe\\' : localRelative
+ const pipeName = `node-test.${process.pid}.sock`
+ return path.join(pipePrefix, pipeName)
+})() // Check that when running a test with
+// `$node --abort-on-uncaught-exception $file child`
+// the process aborts.
function childShouldThrowAndAbort() {
- var testCmd = '';
+ let testCmd = ''
if (!isWindows) {
// Do not create core files, as it can take a lot of disk space on
// continuous testing and developers' machines
- testCmd += 'ulimit -c 0 && ';
+ testCmd += 'ulimit -c 0 && '
}
- testCmd += "\"".concat(process.argv[0], "\" --abort-on-uncaught-exception ");
- testCmd += "\"".concat(process.argv[1], "\" child");
- var child = exec(testCmd);
+ testCmd += `"${process.argv[0]}" --abort-on-uncaught-exception `
+ testCmd += `"${process.argv[1]}" child`
+ const child = exec(testCmd)
child.on('exit', function onExit(exitCode, signal) {
- var errMsg = 'Test should have aborted ' + "but instead exited with exit code ".concat(exitCode) + " and signal ".concat(signal);
- assert(nodeProcessAborted(exitCode, signal), errMsg);
- });
+ const errMsg =
+ 'Test should have aborted ' + `but instead exited with exit code ${exitCode}` + ` and signal ${signal}`
+ assert(nodeProcessAborted(exitCode, signal), errMsg)
+ })
}
function createZeroFilledFile(filename) {
- var fd = fs.openSync(filename, 'w');
- fs.ftruncateSync(fd, 10 * 1024 * 1024);
- fs.closeSync(fd);
+ const fd = fs.openSync(filename, 'w')
+ fs.ftruncateSync(fd, 10 * 1024 * 1024)
+ fs.closeSync(fd)
}
-var pwdCommand = isWindows ? ['cmd.exe', ['/d', '/c', 'cd']] : ['pwd', []];
+const pwdCommand = isWindows ? ['cmd.exe', ['/d', '/c', 'cd']] : ['pwd', []]
function platformTimeout(ms) {
- if (process.features.debug) ms = 2 * ms;
- if (global.__coverage__) ms = 4 * ms;
- if (isAIX) return 2 * ms; // default localhost speed is slower on AIX
+ const multipliers =
+ typeof ms === 'bigint'
+ ? {
+ two: 2n,
+ four: 4n,
+ seven: 7n
+ }
+ : {
+ two: 2,
+ four: 4,
+ seven: 7
+ }
+ if (process.features.debug) ms = multipliers.two * ms
+ if (isAIX) return multipliers.two * ms // Default localhost speed is slower on AIX
+
+ if (isPi) return multipliers.two * ms // Raspberry Pi devices
+
+ return ms
+}
+
+let knownGlobals = [
+ typeof AggregateError !== 'undefined' ? AggregateError : require('../../lib/ours/util').AggregateError,
+ typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController,
+ typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal,
+ typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget,
+ atob,
+ btoa,
+ clearImmediate,
+ clearInterval,
+ clearTimeout,
+ global,
+ setImmediate,
+ setInterval,
+ setTimeout,
+ queueMicrotask
+] // TODO(@jasnell): This check can be temporary. AbortController is
+// not currently supported in either Node.js 12 or 10, making it
+// difficult to run tests comparatively on those versions. Once
+// all supported versions have AbortController as a global, this
+// check can be removed and AbortController can be added to the
+// knownGlobals list above.
+
+if (global.AbortController) knownGlobals.push(global.AbortController)
- if (process.arch !== 'arm') return ms;
- var armv = process.config.variables.arm_version;
- if (armv === '6') return 7 * ms; // ARMv6
+if (global.gc) {
+ knownGlobals.push(global.gc)
+}
- if (armv === '7') return 2 * ms; // ARMv7
+if (global.performance) {
+ knownGlobals.push(global.performance)
+}
- return ms; // ARMv8+
+if (global.PerformanceMark) {
+ knownGlobals.push(global.PerformanceMark)
}
-var knownGlobals = [Buffer, clearImmediate, clearInterval, clearTimeout, global, process, setImmediate, setInterval, setTimeout];
+if (global.PerformanceMeasure) {
+ knownGlobals.push(global.PerformanceMeasure)
+} // TODO(@ethan-arrowood): Similar to previous checks, this can be temporary
+// until v16.x is EOL. Once all supported versions have structuredClone we
+// can add this to the list above instead.
-if (global.gc) {
- knownGlobals.push(global.gc);
+if (global.structuredClone) {
+ knownGlobals.push(global.structuredClone)
}
-if (global.DTRACE_HTTP_SERVER_RESPONSE) {
- knownGlobals.push(DTRACE_HTTP_SERVER_RESPONSE);
- knownGlobals.push(DTRACE_HTTP_SERVER_REQUEST);
- knownGlobals.push(DTRACE_HTTP_CLIENT_RESPONSE);
- knownGlobals.push(DTRACE_HTTP_CLIENT_REQUEST);
- knownGlobals.push(DTRACE_NET_STREAM_END);
- knownGlobals.push(DTRACE_NET_SERVER_CONNECTION);
+if (global.fetch) {
+ knownGlobals.push(fetch)
}
-if (global.COUNTER_NET_SERVER_CONNECTION) {
- knownGlobals.push(COUNTER_NET_SERVER_CONNECTION);
- knownGlobals.push(COUNTER_NET_SERVER_CONNECTION_CLOSE);
- knownGlobals.push(COUNTER_HTTP_SERVER_REQUEST);
- knownGlobals.push(COUNTER_HTTP_SERVER_RESPONSE);
- knownGlobals.push(COUNTER_HTTP_CLIENT_REQUEST);
- knownGlobals.push(COUNTER_HTTP_CLIENT_RESPONSE);
+if (hasCrypto && global.crypto) {
+ knownGlobals.push(global.crypto)
+ knownGlobals.push(global.Crypto)
+ knownGlobals.push(global.CryptoKey)
+ knownGlobals.push(global.SubtleCrypto)
}
-if (process.env.NODE_TEST_KNOWN_GLOBALS) {
- var knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(',');
- allowGlobals.apply(void 0, _toConsumableArray(knownFromEnv));
+if (global.ReadableStream) {
+ knownGlobals.push(
+ global.ReadableStream,
+ global.ReadableStreamDefaultReader,
+ global.ReadableStreamBYOBReader,
+ global.ReadableStreamBYOBRequest,
+ global.ReadableByteStreamController,
+ global.ReadableStreamDefaultController,
+ global.TransformStream,
+ global.TransformStreamDefaultController,
+ global.WritableStream,
+ global.WritableStreamDefaultWriter,
+ global.WritableStreamDefaultController,
+ global.ByteLengthQueuingStrategy,
+ global.CountQueuingStrategy,
+ global.TextEncoderStream,
+ global.TextDecoderStream,
+ global.CompressionStream,
+ global.DecompressionStream
+ )
}
-function allowGlobals() {
- for (var _len = arguments.length, whitelist = new Array(_len), _key = 0; _key < _len; _key++) {
- whitelist[_key] = arguments[_key];
- }
-
- knownGlobals = knownGlobals.concat(whitelist);
+function allowGlobals(...allowlist) {
+ knownGlobals = knownGlobals.concat(allowlist)
}
-/**/
+if (process.env.NODE_TEST_KNOWN_GLOBALS !== '0') {
+ if (process.env.NODE_TEST_KNOWN_GLOBALS) {
+ const knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(',')
+ allowGlobals(...knownFromEnv)
+ }
-if (typeof constructor == 'function') knownGlobals.push(constructor);
-if (typeof DTRACE_NET_SOCKET_READ == 'function') knownGlobals.push(DTRACE_NET_SOCKET_READ);
-if (typeof DTRACE_NET_SOCKET_WRITE == 'function') knownGlobals.push(DTRACE_NET_SOCKET_WRITE);
-if (global.__coverage__) knownGlobals.push(__coverage__);
-'console,clearImmediate,setImmediate,core,__core-js_shared__,Promise,Map,Set,WeakMap,WeakSet,Reflect,System,queueMicrotask,asap,Observable,regeneratorRuntime,_babelPolyfill'.split(',').filter(function (item) {
- return typeof global[item] !== undefined;
-}).forEach(function (item) {
- knownGlobals.push(global[item]);
-});
-/**/
-
-function leakedGlobals() {
- var leaked = [];
+ function leakedGlobals() {
+ const leaked = []
- for (var val in global) {
- if (!knownGlobals.includes(global[val])) {
- leaked.push(val);
+ for (const val in global) {
+ if (!knownGlobals.includes(global[val])) {
+ leaked.push(val)
+ }
}
- }
- if (global.__coverage__) {
- return leaked.filter(function (varname) {
- return !/^(?:cov_|__cov)/.test(varname);
- });
- } else {
- return leaked;
+ return leaked
}
-}
-process.on('exit', function () {
- var leaked = leakedGlobals();
+ process.on('exit', function () {
+ const leaked = leakedGlobals()
- if (leaked.length > 0) {
- assert.fail("Unexpected global(s) found: ".concat(leaked.join(', ')));
- }
-});
-var mustCallChecks = [];
+ if (leaked.length > 0) {
+ assert.fail(`Unexpected global(s) found: ${leaked.join(', ')}`)
+ }
+ })
+}
+
+const mustCallChecks = []
function runCallChecks(exitCode) {
- if (exitCode !== 0) return;
- var failed = mustCallChecks.filter(function (context) {
+ if (exitCode !== 0) return
+ const failed = mustCallChecks.filter(function (context) {
if ('minimum' in context) {
- context.messageSegment = "at least ".concat(context.minimum);
- return context.actual < context.minimum;
- } else {
- context.messageSegment = "exactly ".concat(context.exact);
- return context.actual !== context.exact;
+ context.messageSegment = `at least ${context.minimum}`
+ return context.actual < context.minimum
}
- });
- forEach(failed, function (context) {
- console.log('Mismatched %s function calls. Expected %s, actual %d.', context.name, context.messageSegment, context.actual);
- console.log(context.stack.split('\n').slice(2).join('\n'));
- });
- if (failed.length) process.exit(1);
+
+ context.messageSegment = `exactly ${context.exact}`
+ return context.actual !== context.exact
+ })
+ failed.forEach(function (context) {
+ console.log(
+ 'Mismatched %s function calls. Expected %s, actual %d.',
+ context.name,
+ context.messageSegment,
+ context.actual
+ )
+ console.log(context.stack.split('\n').slice(2).join('\n'))
+ })
+ if (failed.length) process.exit(1)
}
function mustCall(fn, exact) {
- return _mustCallInner(fn, exact, 'exact');
+ return _mustCallInner(fn, exact, 'exact')
}
-function mustCallAtLeast(fn, minimum) {
- return _mustCallInner(fn, minimum, 'minimum');
+function mustSucceed(fn, exact) {
+ return mustCall(function (err, ...args) {
+ assert.ifError(err)
+ if (typeof fn === 'function') return fn.apply(this, args)
+ }, exact)
}
-function _mustCallInner(fn) {
- var _context;
+function mustCallAtLeast(fn, minimum) {
+ return _mustCallInner(fn, minimum, 'minimum')
+}
- var criteria = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1;
- var field = arguments.length > 2 ? arguments[2] : undefined;
- if (process._exiting) throw new Error('Cannot use common.mustCall*() in process exit handler');
+function _mustCallInner(fn, criteria = 1, field) {
+ if (process._exiting) throw new Error('Cannot use common.mustCall*() in process exit handler')
if (typeof fn === 'number') {
- criteria = fn;
- fn = noop;
+ criteria = fn
+ fn = noop
} else if (fn === undefined) {
- fn = noop;
+ fn = noop
}
- if (typeof criteria !== 'number') throw new TypeError("Invalid ".concat(field, " value: ").concat(criteria));
- var context = (_context = {}, _defineProperty(_context, field, criteria), _defineProperty(_context, "actual", 0), _defineProperty(_context, "stack", new Error().stack), _defineProperty(_context, "name", fn.name || ''), _context); // add the exit listener only once to avoid listener leak warnings
-
- if (mustCallChecks.length === 0) process.on('exit', runCallChecks);
- mustCallChecks.push(context);
- return function () {
- context.actual++;
- return fn.apply(this, arguments);
- };
+ if (typeof criteria !== 'number') throw new TypeError(`Invalid ${field} value: ${criteria}`)
+ const context = {
+ [field]: criteria,
+ actual: 0,
+ stack: util.inspect(new Error()),
+ name: fn.name || ''
+ } // Add the exit listener only once to avoid listener leak warnings
+
+ if (mustCallChecks.length === 0) process.on('exit', runCallChecks)
+ mustCallChecks.push(context)
+
+ const _return = function () {
+ // eslint-disable-line func-style
+ context.actual++
+ return fn.apply(this, arguments)
+ } // Function instances have own properties that may be relevant.
+ // Let's replicate those properties to the returned function.
+ // Refs: https://tc39.es/ecma262/#sec-function-instances
+
+ Object.defineProperties(_return, {
+ name: {
+ value: fn.name,
+ writable: false,
+ enumerable: false,
+ configurable: true
+ },
+ length: {
+ value: fn.length,
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+ })
+ return _return
}
function hasMultiLocalhost() {
- var _process$binding2 = process.binding('tcp_wrap'),
- TCP = _process$binding2.TCP,
- TCPConstants = _process$binding2.constants;
+ const { internalBinding } = require('internal/test/binding')
- var t = new TCP(TCPConstants.SOCKET);
- var ret = t.bind('127.0.0.2', 0);
- t.close();
- return ret === 0;
+ const { TCP, constants: TCPConstants } = internalBinding('tcp_wrap')
+ const t = new TCP(TCPConstants.SOCKET)
+ const ret = t.bind('127.0.0.2', 0)
+ t.close()
+ return ret === 0
}
function skipIfEslintMissing() {
if (!fs.existsSync(path.join(__dirname, '..', '..', 'tools', 'node_modules', 'eslint'))) {
- skip('missing ESLint');
+ skip('missing ESLint')
}
}
@@ -455,72 +499,70 @@ function canCreateSymLink() {
// whoami.exe needs to be the one from System32
// If unix tools are in the path, they can shadow the one we want,
// so use the full path while executing whoami
- var whoamiPath = path.join(process.env.SystemRoot, 'System32', 'whoami.exe');
+ const whoamiPath = path.join(process.env.SystemRoot, 'System32', 'whoami.exe')
try {
- var output = execSync("".concat(whoamiPath, " /priv"), {
- timout: 1000
- });
- return output.includes('SeCreateSymbolicLinkPrivilege');
- } catch (_e) {
- return false;
+ const output = execSync(`${whoamiPath} /priv`, {
+ timeout: 1000
+ })
+ return output.includes('SeCreateSymbolicLinkPrivilege')
+ } catch {
+ return false
}
} // On non-Windows platforms, this always returns `true`
-
- return true;
+ return true
}
function getCallSite(top) {
- var originalStackFormatter = Error.prepareStackTrace;
+ const originalStackFormatter = Error.prepareStackTrace
- Error.prepareStackTrace = function (err, stack) {
- return "".concat(stack[0].getFileName(), ":").concat(stack[0].getLineNumber());
- };
+ Error.prepareStackTrace = (err, stack) => `${stack[0].getFileName()}:${stack[0].getLineNumber()}`
- var err = new Error();
- Error.captureStackTrace(err, top); // with the V8 Error API, the stack is not formatted until it is accessed
+ const err = new Error()
+ Error.captureStackTrace(err, top) // With the V8 Error API, the stack is not formatted until it is accessed
- err.stack;
- Error.prepareStackTrace = originalStackFormatter;
- return err.stack;
+ err.stack // eslint-disable-line no-unused-expressions
+
+ Error.prepareStackTrace = originalStackFormatter
+ return err.stack
}
function mustNotCall(msg) {
- var callSite = getCallSite(mustNotCall);
- return function mustNotCall() {
- assert.fail("".concat(msg || 'function should not have been called', " at ").concat(callSite));
- };
+ const callSite = getCallSite(mustNotCall)
+ return function mustNotCall(...args) {
+ const argsInfo = args.length > 0 ? `\ncalled with arguments: ${args.map(util.inspect).join(', ')}` : ''
+ assert.fail(`${msg || 'function should not have been called'} at ${callSite}` + argsInfo)
+ }
}
function printSkipMessage(msg) {
- console.log("1..0 # Skipped: ".concat(msg));
+ console.log(`1..0 # Skipped: ${msg}`)
}
function skip(msg) {
- printSkipMessage(msg);
- process.exit(0);
+ printSkipMessage(msg)
+ process.exit(0)
} // Returns true if the exit code "exitCode" and/or signal name "signal"
// represent the exit code and/or signal name of a node process that aborted,
// false otherwise.
-
function nodeProcessAborted(exitCode, signal) {
// Depending on the compiler used, node will exit with either
// exit code 132 (SIGILL), 133 (SIGTRAP) or 134 (SIGABRT).
- var expectedExitCodes = [132, 133, 134]; // On platforms using KSH as the default shell (like SmartOS),
+ let expectedExitCodes = [132, 133, 134] // On platforms using KSH as the default shell (like SmartOS),
// when a process aborts, KSH exits with an exit code that is
// greater than 256, and thus the exit code emitted with the 'exit'
// event is null and the signal is set to either SIGILL, SIGTRAP,
// or SIGABRT (depending on the compiler).
- var expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT']; // On Windows, 'aborts' are of 2 types, depending on the context:
- // (i) Forced access violation, if --abort-on-uncaught-exception is on
- // which corresponds to exit code 3221225477 (0xC0000005)
+ const expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT'] // On Windows, 'aborts' are of 2 types, depending on the context:
+ // (i) Exception breakpoint, if --abort-on-uncaught-exception is on
+ // which corresponds to exit code 2147483651 (0x80000003)
// (ii) Otherwise, _exit(134) which is called in place of abort() due to
// raising SIGABRT exiting with ambiguous exit code '3' by default
- if (isWindows) expectedExitCodes = [0xC0000005, 134]; // When using --abort-on-uncaught-exception, V8 will use
+ if (isWindows) expectedExitCodes = [0x80000003, 134] // When using --abort-on-uncaught-exception, V8 will use
// base::OS::Abort to terminate the process.
// Depending on the compiler used, the shell or other aspects of
// the platform used to build the node binary, this will actually
@@ -529,422 +571,410 @@ function nodeProcessAborted(exitCode, signal) {
// the expected exit codes or signals.
if (signal !== null) {
- return expectedSignals.includes(signal);
- } else {
- return expectedExitCodes.includes(exitCode);
+ return expectedSignals.includes(signal)
}
-}
-
-function busyLoop(time) {
- var startTime = Timer.now();
- var stopTime = startTime + time;
- while (Timer.now() < stopTime) {}
+ return expectedExitCodes.includes(exitCode)
}
function isAlive(pid) {
try {
- process.kill(pid, 'SIGCONT');
- return true;
- } catch (_unused) {
- return false;
+ process.kill(pid, 'SIGCONT')
+ return true
+ } catch {
+ return false
}
}
-function _expectWarning(name, expected) {
- var map = new Map(expected);
- return mustCall(function (warning) {
- assert.strictEqual(warning.name, name);
- assert.ok(map.has(warning.message), "unexpected error message: \"".concat(warning.message, "\""));
- var code = map.get(warning.message);
- assert.strictEqual(warning.code, code); // Remove a warning message after it is seen so that we guarantee that we
- // get each message only once.
-
- map.delete(expected);
- }, expected.length);
-}
-
-function expectWarningByName(name, expected, code) {
+function _expectWarning(name, expected, code) {
if (typeof expected === 'string') {
- expected = [[expected, code]];
+ expected = [[expected, code]]
+ } else if (!Array.isArray(expected)) {
+ expected = Object.entries(expected).map(([a, b]) => [b, a])
+ } else if (!Array.isArray(expected[0])) {
+ expected = [[expected[0], expected[1]]]
+ } // Deprecation codes are mandatory, everything else is not.
+
+ if (name === 'DeprecationWarning') {
+ expected.forEach(([_, code]) => assert(code, expected))
}
- process.on('warning', _expectWarning(name, expected));
-}
-
-function expectWarningByMap(warningMap) {
- var catchWarning = {};
- forEach(objectKeys(warningMap), function (name) {
- var expected = warningMap[name];
+ return mustCall((warning) => {
+ const expectedProperties = expected.shift()
- if (!Array.isArray(expected)) {
- throw new Error('warningMap entries must be arrays consisting of two ' + 'entries: [message, warningCode]');
+ if (!expectedProperties) {
+ assert.fail(`Unexpected extra warning received: ${warning}`)
}
- if (!Array.isArray(expected[0])) {
- if (expected.length === 0) {
- return;
- }
+ const [message, code] = expectedProperties
+ assert.strictEqual(warning.name, name)
- expected = [[expected[0], expected[1]]];
+ if (typeof message === 'string') {
+ assert.strictEqual(warning.message, message)
+ } else {
+ assert.match(warning.message, message)
}
- catchWarning[name] = _expectWarning(name, expected);
- });
- process.on('warning', function (warning) {
- return catchWarning[warning.name](warning);
- });
-} // accepts a warning name and description or array of descriptions or a map
-// of warning names to description(s)
-// ensures a warning is generated for each name/description pair
-
-
-function expectWarning(nameOrMap, expected, code) {
- if (typeof nameOrMap === 'string') {
- expectWarningByName(nameOrMap, expected, code);
- } else {
- expectWarningByMap(nameOrMap);
- }
+ assert.strictEqual(warning.code, code)
+ }, expected.length)
}
-var Comparison = function Comparison(obj, keys) {
- _classCallCheck(this, Comparison);
-
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
- var _iteratorError2 = undefined;
+let catchWarning // Accepts a warning name and description or array of descriptions or a map of
+// warning names to description(s) ensures a warning is generated for each
+// name/description pair.
+// The expected messages have to be unique per `expectWarning()` call.
- try {
- for (var _iterator2 = keys[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
- var key = _step2.value;
- if (key in obj) this[key] = obj[key];
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
+function expectWarning(nameOrMap, expected, code) {
+ if (catchWarning === undefined) {
+ catchWarning = {}
+ process.on('warning', (warning) => {
+ if (!catchWarning[warning.name]) {
+ throw new TypeError(`"${warning.name}" was triggered without being expected.\n` + util.inspect(warning))
}
- }
- }
-}; // Useful for testing expected internal/error objects
+ catchWarning[warning.name](warning)
+ })
+ }
-function expectsError(fn, settings, exact) {
- if (typeof fn !== 'function') {
- exact = settings;
- settings = fn;
- fn = undefined;
+ if (typeof nameOrMap === 'string') {
+ catchWarning[nameOrMap] = _expectWarning(nameOrMap, expected, code)
+ } else {
+ Object.keys(nameOrMap).forEach((name) => {
+ catchWarning[name] = _expectWarning(name, nameOrMap[name])
+ })
}
+} // Useful for testing expected internal/error objects
- function innerFn(error) {
- if (arguments.length !== 1) {
- // Do not use `assert.strictEqual()` to prevent `util.inspect` from
+function expectsError(validator, exact) {
+ return mustCall((...args) => {
+ if (args.length !== 1) {
+ // Do not use `assert.strictEqual()` to prevent `inspect` from
// always being called.
- assert.fail("Expected one argument, got ".concat(util.inspect(arguments)));
+ assert.fail(`Expected one argument, got ${util.inspect(args)}`)
}
- var descriptor = Object.getOwnPropertyDescriptor(error, 'message'); // The error message should be non-enumerable
-
- assert.strictEqual(descriptor.enumerable, false);
- var innerSettings = settings;
-
- if ('type' in settings) {
- var type = settings.type;
-
- if (type !== Error && !Error.isPrototypeOf(type)) {
- throw new TypeError('`settings.type` must inherit from `Error`');
- }
-
- var _constructor = error.constructor;
-
- if (_constructor.name === 'NodeError' && type.name !== 'NodeError') {
- _constructor = Object.getPrototypeOf(error.constructor);
- } // Add the `type` to the error to properly compare and visualize it.
-
-
- if (!('type' in error)) error.type = _constructor;
- }
-
- if ('message' in settings && typeof settings.message === 'object' && settings.message.test(error.message)) {
- // Make a copy so we are able to modify the settings.
- innerSettings = Object.create(settings, Object.getOwnPropertyDescriptors(settings)); // Visualize the message as identical in case of other errors.
-
- innerSettings.message = error.message;
- } // Check all error properties.
-
-
- var keys = objectKeys(settings);
- var _iteratorNormalCompletion3 = true;
- var _didIteratorError3 = false;
- var _iteratorError3 = undefined;
-
- try {
- for (var _iterator3 = keys[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
- var key = _step3.value;
-
- if (!require('deep-strict-equal')(error[key], innerSettings[key])) {
- // Create placeholder objects to create a nice output.
- var a = new Comparison(error, keys);
- var b = new Comparison(innerSettings, keys);
- var tmpLimit = Error.stackTraceLimit;
- Error.stackTraceLimit = 0;
- var err = new assert.AssertionError({
- actual: a,
- expected: b,
- operator: 'strictEqual',
- stackStartFn: assert.throws
- });
- Error.stackTraceLimit = tmpLimit;
- throw new assert.AssertionError({
- actual: error,
- expected: settings,
- operator: 'common.expectsError',
- message: err.message
- });
- }
- }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion3 && _iterator3.return != null) {
- _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
- }
- }
- }
-
- return true;
- }
-
- if (fn) {
- assert.throws(fn, innerFn);
- return;
- }
+ const error = args.pop()
+ const descriptor = Object.getOwnPropertyDescriptor(error, 'message') // The error message should be non-enumerable
- return mustCall(innerFn, exact);
+ assert.strictEqual(descriptor.enumerable, false)
+ assert.throws(() => {
+ throw error
+ }, validator)
+ return true
+ }, exact)
}
function skipIfInspectorDisabled() {
- if (process.config.variables.v8_enable_inspector === 0) {
- skip('V8 inspector is disabled');
+ if (!process.features.inspector) {
+ skip('V8 inspector is disabled')
}
}
function skipIf32Bits() {
if (bits < 64) {
- skip('The tested feature is not available in 32bit builds');
+ skip('The tested feature is not available in 32bit builds')
}
}
function skipIfWorker() {
if (!isMainThread) {
- skip('This test only works on a main thread');
+ skip('This test only works on a main thread')
}
}
function getArrayBufferViews(buf) {
- var buffer = buf.buffer,
- byteOffset = buf.byteOffset,
- byteLength = buf.byteLength;
- var out = [];
- var arrayBufferViews = [Int8Array, Uint8Array, Uint8ClampedArray, Int16Array, Uint16Array, Int32Array, Uint32Array, Float32Array, Float64Array, DataView];
-
- for (var _i = 0, _arrayBufferViews = arrayBufferViews; _i < _arrayBufferViews.length; _i++) {
- var type = _arrayBufferViews[_i];
- var _type$BYTES_PER_ELEME = type.BYTES_PER_ELEMENT,
- BYTES_PER_ELEMENT = _type$BYTES_PER_ELEME === void 0 ? 1 : _type$BYTES_PER_ELEME;
+ const { buffer, byteOffset, byteLength } = buf
+ const out = []
+ const arrayBufferViews = [
+ Int8Array,
+ Uint8Array,
+ Uint8ClampedArray,
+ Int16Array,
+ Uint16Array,
+ Int32Array,
+ Uint32Array,
+ Float32Array,
+ Float64Array,
+ DataView
+ ]
+
+ for (const type of arrayBufferViews) {
+ const { BYTES_PER_ELEMENT = 1 } = type
if (byteLength % BYTES_PER_ELEMENT === 0) {
- out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT));
+ out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT))
}
}
- return out;
+ return out
}
function getBufferSources(buf) {
- return [].concat(_toConsumableArray(getArrayBufferViews(buf)), [new Uint8Array(buf).buffer]);
-} // Crash the process on unhandled rejections.
-
-
-var crashOnUnhandledRejection = function crashOnUnhandledRejection(err) {
- throw err;
-};
-
-process.on('unhandledRejection', crashOnUnhandledRejection);
-
-function disableCrashOnUnhandledRejection() {
- process.removeListener('unhandledRejection', crashOnUnhandledRejection);
+ return [...getArrayBufferViews(buf), new Uint8Array(buf).buffer]
}
function getTTYfd() {
// Do our best to grab a tty fd.
- var tty = require('tty'); // Don't attempt fd 0 as it is not writable on Windows.
+ const tty = require('tty') // Don't attempt fd 0 as it is not writable on Windows.
// Ref: ef2861961c3d9e9ed6972e1e84d969683b25cf95
-
- var ttyFd = [1, 2, 4, 5].find(tty.isatty);
+ const ttyFd = [1, 2, 4, 5].find(tty.isatty)
if (ttyFd === undefined) {
try {
- return fs.openSync('/dev/tty');
- } catch (_unused2) {
+ return fs.openSync('/dev/tty')
+ } catch {
// There aren't any tty fd's available to use.
- return -1;
+ return -1
}
}
- return ttyFd;
+ return ttyFd
}
function runWithInvalidFD(func) {
- var fd = 1 << 30; // Get first known bad file descriptor. 1 << 30 is usually unlikely to
+ let fd = 1 << 30 // Get first known bad file descriptor. 1 << 30 is usually unlikely to
// be an valid one.
try {
- while (fs.fstatSync(fd--) && fd > 0) {
- ;
+ while (fs.fstatSync(fd--) && fd > 0);
+ } catch {
+ return func(fd)
+ }
+
+ printSkipMessage('Could not generate an invalid fd')
+} // A helper function to simplify checking for ERR_INVALID_ARG_TYPE output.
+
+function invalidArgTypeHelper(input) {
+ if (input == null) {
+ return ` Received ${input}`
+ }
+
+ if (typeof input === 'function' && input.name) {
+ return ` Received function ${input.name}`
+ }
+
+ if (typeof input === 'object') {
+ if (input.constructor && input.constructor.name) {
+ return ` Received an instance of ${input.constructor.name}`
+ }
+
+ return ` Received ${util.inspect(input, {
+ depth: -1
+ })}`
+ }
+
+ let inspected = util.inspect(input, {
+ colors: false
+ })
+ if (inspected.length > 25) inspected = `${inspected.slice(0, 25)}...`
+ return ` Received type ${typeof input} (${inspected})`
+}
+
+function skipIfDumbTerminal() {
+ if (isDumbTerminal) {
+ skip('skipping - dumb terminal')
+ }
+}
+
+function gcUntil(name, condition) {
+ if (typeof name === 'function') {
+ condition = name
+ name = undefined
+ }
+
+ return new Promise((resolve, reject) => {
+ let count = 0
+
+ function gcAndCheck() {
+ setImmediate(() => {
+ count++
+ global.gc()
+
+ if (condition()) {
+ resolve()
+ } else if (count < 10) {
+ gcAndCheck()
+ } else {
+ reject(name === undefined ? undefined : 'Test ' + name + ' failed')
+ }
+ })
+ }
+
+ gcAndCheck()
+ })
+}
+
+function requireNoPackageJSONAbove(dir = __dirname) {
+ let possiblePackage = path.join(dir, '..', 'package.json')
+ let lastPackage = null
+
+ while (possiblePackage !== lastPackage) {
+ if (fs.existsSync(possiblePackage)) {
+ assert.fail(
+ "This test shouldn't load properties from a package.json above " +
+ `its file location. Found package.json at ${possiblePackage}.`
+ )
}
- } catch (_unused3) {
- return func(fd);
+
+ lastPackage = possiblePackage
+ possiblePackage = path.join(possiblePackage, '..', '..', 'package.json')
}
+}
- printSkipMessage('Could not generate an invalid fd');
-}
-
-module.exports = {
- allowGlobals: allowGlobals,
- buildType: buildType,
- busyLoop: busyLoop,
- canCreateSymLink: canCreateSymLink,
- childShouldThrowAndAbort: childShouldThrowAndAbort,
- createZeroFilledFile: createZeroFilledFile,
- disableCrashOnUnhandledRejection: disableCrashOnUnhandledRejection,
- enoughTestCpu: enoughTestCpu,
- enoughTestMem: enoughTestMem,
- expectsError: expectsError,
- expectWarning: expectWarning,
- getArrayBufferViews: getArrayBufferViews,
- getBufferSources: getBufferSources,
- getCallSite: getCallSite,
- getTTYfd: getTTYfd,
- hasIntl: hasIntl,
- hasCrypto: hasCrypto,
- hasIPv6: hasIPv6,
- hasMultiLocalhost: hasMultiLocalhost,
- isAIX: isAIX,
- isAlive: isAlive,
- isFreeBSD: isFreeBSD,
- isLinux: isLinux,
- isLinuxPPCBE: isLinuxPPCBE,
- isMainThread: isMainThread,
- isOpenBSD: isOpenBSD,
- isOSX: isOSX,
- isSunOS: isSunOS,
- isWindows: isWindows,
- localIPv6Hosts: localIPv6Hosts,
- mustCall: mustCall,
- mustCallAtLeast: mustCallAtLeast,
- mustNotCall: mustNotCall,
- nodeProcessAborted: nodeProcessAborted,
- noWarnCode: undefined,
- PIPE: PIPE,
- platformTimeout: platformTimeout,
- printSkipMessage: printSkipMessage,
- pwdCommand: pwdCommand,
- rootDir: rootDir,
- runWithInvalidFD: runWithInvalidFD,
- skip: skip,
- skipIf32Bits: skipIf32Bits,
- skipIfEslintMissing: skipIfEslintMissing,
- skipIfInspectorDisabled: skipIfInspectorDisabled,
- skipIfWorker: skipIfWorker,
-
- get localhostIPv6() {
- return '::1';
+const common = {
+ allowGlobals,
+ buildType,
+ canCreateSymLink,
+ childShouldThrowAndAbort,
+ createZeroFilledFile,
+ expectsError,
+ expectWarning,
+ gcUntil,
+ getArrayBufferViews,
+ getBufferSources,
+ getCallSite,
+ getTTYfd,
+ hasIntl,
+ hasCrypto,
+ hasOpenSSL3,
+ hasQuic,
+ hasMultiLocalhost,
+ invalidArgTypeHelper,
+ isAIX,
+ isAlive,
+ isDumbTerminal,
+ isFreeBSD,
+ isLinux,
+ isMainThread,
+ isOpenBSD,
+ isOSX,
+ isPi,
+ isSunOS,
+ isWindows,
+ localIPv6Hosts,
+ mustCall,
+ mustCallAtLeast,
+ mustNotCall,
+ mustSucceed,
+ nodeProcessAborted,
+ PIPE,
+ platformTimeout,
+ printSkipMessage,
+ pwdCommand,
+ requireNoPackageJSONAbove,
+ runWithInvalidFD,
+ skip,
+ skipIf32Bits,
+ skipIfDumbTerminal,
+ skipIfEslintMissing,
+ skipIfInspectorDisabled,
+ skipIfWorker,
+
+ get enoughTestMem() {
+ return require('os').totalmem() > 0x70000000
+ /* 1.75 Gb */
},
get hasFipsCrypto() {
- return hasCrypto && require('crypto').fips;
+ return hasCrypto && require('crypto').getFips()
+ },
+
+ get hasIPv6() {
+ const iFaces = require('os').networkInterfaces()
+
+ const re = isWindows ? /Loopback Pseudo-Interface/ : /lo/
+ return Object.keys(iFaces).some((name) => {
+ return re.test(name) && iFaces[name].some(({ family }) => family === 6)
+ })
},
get inFreeBSDJail() {
- if (inFreeBSDJail !== null) return inFreeBSDJail;
+ if (inFreeBSDJail !== null) return inFreeBSDJail
if (exports.isFreeBSD && execSync('sysctl -n security.jail.jailed').toString() === '1\n') {
- inFreeBSDJail = true;
+ inFreeBSDJail = true
} else {
- inFreeBSDJail = false;
+ inFreeBSDJail = false
}
- return inFreeBSDJail;
+ return inFreeBSDJail
+ },
+
+ // On IBMi, process.platform and os.platform() both return 'aix',
+ // It is not enough to differentiate between IBMi and real AIX system.
+ get isIBMi() {
+ return require('os').type() === 'OS400'
+ },
+
+ get isLinuxPPCBE() {
+ return process.platform === 'linux' && process.arch === 'ppc64' && require('os').endianness() === 'BE'
},
get localhostIPv4() {
- if (localhostIPv4 !== null) return localhostIPv4;
+ if (localhostIPv4 !== null) return localhostIPv4
if (this.inFreeBSDJail) {
// Jailed network interfaces are a bit special - since we need to jump
// through loops, as well as this being an exception case, assume the
// user will provide this instead.
if (process.env.LOCALHOST) {
- localhostIPv4 = process.env.LOCALHOST;
+ localhostIPv4 = process.env.LOCALHOST
} else {
- console.error('Looks like we\'re in a FreeBSD Jail. ' + 'Please provide your default interface address ' + 'as LOCALHOST or expect some tests to fail.');
+ console.error(
+ "Looks like we're in a FreeBSD Jail. " +
+ 'Please provide your default interface address ' +
+ 'as LOCALHOST or expect some tests to fail.'
+ )
}
}
- if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1';
- return localhostIPv4;
+ if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1'
+ return localhostIPv4
},
// opensslCli defined lazily to reduce overhead of spawnSync
get opensslCli() {
- if (opensslCli !== null) return opensslCli;
+ if (opensslCli !== null) return opensslCli
if (process.config.variables.node_shared_openssl) {
- // use external command
- opensslCli = 'openssl';
+ // Use external command
+ opensslCli = 'openssl'
} else {
- // use command built from sources included in Node.js repository
- opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli');
+ // Use command built from sources included in Node.js repository
+ opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli')
}
- if (exports.isWindows) opensslCli += '.exe';
- var opensslCmd = spawnSync(opensslCli, ['version']);
+ if (exports.isWindows) opensslCli += '.exe'
+ const opensslCmd = spawnSync(opensslCli, ['version'])
if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) {
- // openssl command cannot be executed
- opensslCli = false;
+ // OpenSSL command cannot be executed
+ opensslCli = false
}
- return opensslCli;
+ return opensslCli
},
get PORT() {
if (+process.env.TEST_PARALLEL) {
- throw new Error('common.PORT cannot be used in a parallelized test');
+ throw new Error('common.PORT cannot be used in a parallelized test')
}
- return +process.env.NODE_COMMON_PORT || 12346;
- }
-
-};
+ return +process.env.NODE_COMMON_PORT || 12346
+ },
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
+ /**
+ * Returns the EOL character used by this Git checkout.
+ */
+ get checkoutEOL() {
+ return fs.readFileSync(__filename).includes('\r\n') ? '\r\n' : '\n'
+ }
+}
+const validProperties = new Set(Object.keys(common))
+module.exports = new Proxy(common, {
+ get(obj, prop) {
+ if (!validProperties.has(prop)) throw new Error(`Using invalid common property: '${prop}'`)
+ return obj[prop]
}
-}
\ No newline at end of file
+})
diff --git a/test/common/index.mjs b/test/common/index.mjs
index d320100604..ec181dcacb 100644
--- a/test/common/index.mjs
+++ b/test/common/index.mjs
@@ -1,28 +1,26 @@
-/**/
- require('@babel/polyfill');
- var util = require('util');
- for (var i in util) exports[i] = util[i];
- /**/// Flags: --experimental-modules
-/* eslint-disable node-core/required-modules */
-import common from './index.js';
+import { createRequire } from 'module';
+
+const require = createRequire(import.meta.url);
+const common = require('./index.js');
const {
isMainThread,
isWindows,
isAIX,
+ isIBMi,
isLinuxPPCBE,
isSunOS,
+ isDumbTerminal,
isFreeBSD,
isOpenBSD,
isLinux,
isOSX,
enoughTestMem,
- enoughTestCpu,
- rootDir,
buildType,
localIPv6Hosts,
opensslCli,
PIPE,
+ hasCrypto,
hasIPv6,
childShouldThrowAndAbort,
createZeroFilledFile,
@@ -30,25 +28,23 @@ const {
allowGlobals,
mustCall,
mustCallAtLeast,
+ mustSucceed,
hasMultiLocalhost,
+ skipIfDumbTerminal,
skipIfEslintMissing,
canCreateSymLink,
getCallSite,
mustNotCall,
printSkipMessage,
skip,
- ArrayStream,
nodeProcessAborted,
- busyLoop,
isAlive,
- noWarnCode,
expectWarning,
expectsError,
skipIfInspectorDisabled,
skipIf32Bits,
getArrayBufferViews,
getBufferSources,
- disableCrashOnUnhandledRejection,
getTTYfd,
runWithInvalidFD
} = common;
@@ -57,19 +53,20 @@ export {
isMainThread,
isWindows,
isAIX,
+ isIBMi,
isLinuxPPCBE,
isSunOS,
+ isDumbTerminal,
isFreeBSD,
isOpenBSD,
isLinux,
isOSX,
enoughTestMem,
- enoughTestCpu,
- rootDir,
buildType,
localIPv6Hosts,
opensslCli,
PIPE,
+ hasCrypto,
hasIPv6,
childShouldThrowAndAbort,
createZeroFilledFile,
@@ -77,31 +74,24 @@ export {
allowGlobals,
mustCall,
mustCallAtLeast,
+ mustSucceed,
hasMultiLocalhost,
+ skipIfDumbTerminal,
skipIfEslintMissing,
canCreateSymLink,
getCallSite,
mustNotCall,
printSkipMessage,
skip,
- ArrayStream,
nodeProcessAborted,
- busyLoop,
isAlive,
- noWarnCode,
expectWarning,
expectsError,
skipIfInspectorDisabled,
skipIf32Bits,
getArrayBufferViews,
getBufferSources,
- disableCrashOnUnhandledRejection,
getTTYfd,
- runWithInvalidFD
+ runWithInvalidFD,
+ createRequire
};
-
-function forEach (xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
diff --git a/test/common/inspector-helper.js b/test/common/inspector-helper.js
deleted file mode 100644
index f90d43220b..0000000000
--- a/test/common/inspector-helper.js
+++ /dev/null
@@ -1,789 +0,0 @@
-"use strict";
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
-
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert');
-
-var fs = require('fs');
-
-var http = require('http');
-
-var fixtures = require('../common/fixtures');
-
-var _require = require('child_process'),
- spawn = _require.spawn;
-
-var _require2 = require('url'),
- parseURL = _require2.parse;
-
-var _require3 = require('internal/url'),
- pathToFileURL = _require3.pathToFileURL;
-
-var _require4 = require('events'),
- EventEmitter = _require4.EventEmitter;
-
-var _MAINSCRIPT = fixtures.path('loop.js');
-
-var DEBUG = false;
-var TIMEOUT = common.platformTimeout(15 * 1000);
-
-function spawnChildProcess(inspectorFlags, scriptContents, scriptFile) {
- var args = [].concat(inspectorFlags);
-
- if (scriptContents) {
- args.push('-e', scriptContents);
- } else {
- args.push(scriptFile);
- }
-
- var child = spawn(process.execPath, args);
- var handler = tearDown.bind(null, child);
- process.on('exit', handler);
- process.on('uncaughtException', handler);
- common.disableCrashOnUnhandledRejection();
- process.on('unhandledRejection', handler);
- process.on('SIGINT', handler);
- return child;
-}
-
-function makeBufferingDataCallback(dataCallback) {
- var buffer = Buffer.alloc(0);
- return function (data) {
- var newData = Buffer.concat([buffer, data]);
- var str = newData.toString('utf8');
- var lines = str.replace(/\r/g, '').split('\n');
- if (str.endsWith('\n')) buffer = Buffer.alloc(0);else buffer = Buffer.from(lines.pop(), 'utf8');
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = lines[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var line = _step.value;
- dataCallback(line);
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
- };
-}
-
-function tearDown(child, err) {
- child.kill();
-
- if (err) {
- console.error(err);
- process.exit(1);
- }
-}
-
-function parseWSFrame(buffer) {
- // Protocol described in https://tools.ietf.org/html/rfc6455#section-5
- var message = null;
- if (buffer.length < 2) return {
- length: 0,
- message: message
- };
-
- if (buffer[0] === 0x88 && buffer[1] === 0x00) {
- return {
- length: 2,
- message: message,
- closed: true
- };
- }
-
- assert.strictEqual(buffer[0], 0x81);
- var dataLen = 0x7F & buffer[1];
- var bodyOffset = 2;
- if (buffer.length < bodyOffset + dataLen) return 0;
-
- if (dataLen === 126) {
- dataLen = buffer.readUInt16BE(2);
- bodyOffset = 4;
- } else if (dataLen === 127) {
- assert(buffer[2] === 0 && buffer[3] === 0, 'Inspector message too big');
- dataLen = buffer.readUIntBE(4, 6);
- bodyOffset = 10;
- }
-
- if (buffer.length < bodyOffset + dataLen) return {
- length: 0,
- message: message
- };
- var jsonPayload = buffer.slice(bodyOffset, bodyOffset + dataLen).toString('utf8');
-
- try {
- message = JSON.parse(jsonPayload);
- } catch (e) {
- console.error("JSON.parse() failed for: ".concat(jsonPayload));
- throw e;
- }
-
- if (DEBUG) console.log('[received]', JSON.stringify(message));
- return {
- length: bodyOffset + dataLen,
- message: message
- };
-}
-
-function formatWSFrame(message) {
- var messageBuf = Buffer.from(JSON.stringify(message));
- var wsHeaderBuf = Buffer.allocUnsafe(16);
- wsHeaderBuf.writeUInt8(0x81, 0);
- var byte2 = 0x80;
- var bodyLen = messageBuf.length;
- var maskOffset = 2;
-
- if (bodyLen < 126) {
- byte2 = 0x80 + bodyLen;
- } else if (bodyLen < 65536) {
- byte2 = 0xFE;
- wsHeaderBuf.writeUInt16BE(bodyLen, 2);
- maskOffset = 4;
- } else {
- byte2 = 0xFF;
- wsHeaderBuf.writeUInt32BE(bodyLen, 2);
- wsHeaderBuf.writeUInt32BE(0, 6);
- maskOffset = 10;
- }
-
- wsHeaderBuf.writeUInt8(byte2, 1);
- wsHeaderBuf.writeUInt32BE(0x01020408, maskOffset);
-
- for (var _i = 0; _i < messageBuf.length; _i++) {
- messageBuf[_i] = messageBuf[_i] ^ 1 << _i % 4;
- }
-
- return Buffer.concat([wsHeaderBuf.slice(0, maskOffset + 4), messageBuf]);
-}
-
-var InspectorSession =
-/*#__PURE__*/
-function () {
- function InspectorSession(socket, instance) {
- var _this = this;
-
- _classCallCheck(this, InspectorSession);
-
- this._instance = instance;
- this._socket = socket;
- this._nextId = 1;
- this._commandResponsePromises = new Map();
- this._unprocessedNotifications = [];
- this._notificationCallback = null;
- this._scriptsIdsByUrl = new Map();
- var buffer = Buffer.alloc(0);
- socket.on('data', function (data) {
- buffer = Buffer.concat([buffer, data]);
-
- do {
- var _parseWSFrame = parseWSFrame(buffer),
- length = _parseWSFrame.length,
- message = _parseWSFrame.message,
- closed = _parseWSFrame.closed;
-
- if (!length) break;
-
- if (closed) {
- socket.write(Buffer.from([0x88, 0x00])); // WS close frame
- }
-
- buffer = buffer.slice(length);
- if (message) _this._onMessage(message);
- } while (true);
- });
- this._terminationPromise = new Promise(function (resolve) {
- socket.once('close', resolve);
- });
- }
-
- _createClass(InspectorSession, [{
- key: "waitForServerDisconnect",
- value: function waitForServerDisconnect() {
- return this._terminationPromise;
- }
- }, {
- key: "disconnect",
- value: function () {
- var _disconnect = _asyncToGenerator(function* () {
- this._socket.destroy();
-
- return this.waitForServerDisconnect();
- });
-
- function disconnect() {
- return _disconnect.apply(this, arguments);
- }
-
- return disconnect;
- }()
- }, {
- key: "_onMessage",
- value: function _onMessage(message) {
- if (message.id) {
- var _this$_commandRespons = this._commandResponsePromises.get(message.id),
- resolve = _this$_commandRespons.resolve,
- reject = _this$_commandRespons.reject;
-
- this._commandResponsePromises.delete(message.id);
-
- if (message.result) resolve(message.result);else reject(message.error);
- } else {
- if (message.method === 'Debugger.scriptParsed') {
- var _message$params = message.params,
- scriptId = _message$params.scriptId,
- url = _message$params.url;
-
- this._scriptsIdsByUrl.set(scriptId, url);
-
- var fileUrl = url.startsWith('file:') ? url : pathToFileURL(url).toString();
-
- if (fileUrl === this.scriptURL().toString()) {
- this.mainScriptId = scriptId;
- }
- }
-
- if (this._notificationCallback) {
- // In case callback needs to install another
- var callback = this._notificationCallback;
- this._notificationCallback = null;
- callback(message);
- } else {
- this._unprocessedNotifications.push(message);
- }
- }
- }
- }, {
- key: "_sendMessage",
- value: function _sendMessage(message) {
- var _this2 = this;
-
- var msg = JSON.parse(JSON.stringify(message)); // Clone!
-
- msg.id = this._nextId++;
- if (DEBUG) console.log('[sent]', JSON.stringify(msg));
- var responsePromise = new Promise(function (resolve, reject) {
- _this2._commandResponsePromises.set(msg.id, {
- resolve: resolve,
- reject: reject
- });
- });
- return new Promise(function (resolve) {
- return _this2._socket.write(formatWSFrame(msg), resolve);
- }).then(function () {
- return responsePromise;
- });
- }
- }, {
- key: "send",
- value: function send(commands) {
- var _this3 = this;
-
- if (Array.isArray(commands)) {
- // Multiple commands means the response does not matter. There might even
- // never be a response.
- return Promise.all(commands.map(function (command) {
- return _this3._sendMessage(command);
- })).then(function () {});
- } else {
- return this._sendMessage(commands);
- }
- }
- }, {
- key: "waitForNotification",
- value: function waitForNotification(methodOrPredicate, description) {
- var desc = description || methodOrPredicate;
- var message = "Timed out waiting for matching notification (".concat(desc, "))");
- return fires(this._asyncWaitForNotification(methodOrPredicate), message, TIMEOUT);
- }
- }, {
- key: "_asyncWaitForNotification",
- value: function () {
- var _asyncWaitForNotification2 = _asyncToGenerator(function* (methodOrPredicate) {
- var _this4 = this;
-
- function matchMethod(notification) {
- return notification.method === methodOrPredicate;
- }
-
- var predicate = typeof methodOrPredicate === 'string' ? matchMethod : methodOrPredicate;
- var notification = null;
-
- do {
- if (this._unprocessedNotifications.length) {
- notification = this._unprocessedNotifications.shift();
- } else {
- notification = yield new Promise(function (resolve) {
- return _this4._notificationCallback = resolve;
- });
- }
- } while (!predicate(notification));
-
- return notification;
- });
-
- function _asyncWaitForNotification(_x) {
- return _asyncWaitForNotification2.apply(this, arguments);
- }
-
- return _asyncWaitForNotification;
- }()
- }, {
- key: "_isBreakOnLineNotification",
- value: function _isBreakOnLineNotification(message, line, expectedScriptPath) {
- if (message.method === 'Debugger.paused') {
- var callFrame = message.params.callFrames[0];
- var location = callFrame.location;
-
- var scriptPath = this._scriptsIdsByUrl.get(location.scriptId);
-
- assert.strictEqual(scriptPath.toString(), expectedScriptPath.toString(), "".concat(scriptPath, " !== ").concat(expectedScriptPath));
- assert.strictEqual(location.lineNumber, line);
- return true;
- }
- }
- }, {
- key: "waitForBreakOnLine",
- value: function waitForBreakOnLine(line, url) {
- var _this5 = this;
-
- return this.waitForNotification(function (notification) {
- return _this5._isBreakOnLineNotification(notification, line, url);
- }, "break on ".concat(url, ":").concat(line));
- }
- }, {
- key: "_matchesConsoleOutputNotification",
- value: function _matchesConsoleOutputNotification(notification, type, values) {
- if (!Array.isArray(values)) values = [values];
-
- if (notification.method === 'Runtime.consoleAPICalled') {
- var params = notification.params;
-
- if (params.type === type) {
- var _i2 = 0;
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
- var _iteratorError2 = undefined;
-
- try {
- for (var _iterator2 = params.args[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
- var value = _step2.value;
- if (value.value !== values[_i2++]) return false;
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
- }
-
- return _i2 === values.length;
- }
- }
- }
- }, {
- key: "waitForConsoleOutput",
- value: function waitForConsoleOutput(type, values) {
- var _this6 = this;
-
- var desc = "Console output matching ".concat(JSON.stringify(values));
- return this.waitForNotification(function (notification) {
- return _this6._matchesConsoleOutputNotification(notification, type, values);
- }, desc);
- }
- }, {
- key: "runToCompletion",
- value: function () {
- var _runToCompletion = _asyncToGenerator(function* () {
- console.log('[test]', 'Verify node waits for the frontend to disconnect');
- yield this.send({
- 'method': 'Debugger.resume'
- });
- yield this.waitForNotification(function (notification) {
- return notification.method === 'Runtime.executionContextDestroyed' && notification.params.executionContextId === 1;
- });
-
- while ((yield this._instance.nextStderrString()) !== 'Waiting for the debugger to disconnect...') {
- ;
- }
-
- yield this.disconnect();
- });
-
- function runToCompletion() {
- return _runToCompletion.apply(this, arguments);
- }
-
- return runToCompletion;
- }()
- }, {
- key: "scriptPath",
- value: function scriptPath() {
- return this._instance.scriptPath();
- }
- }, {
- key: "script",
- value: function script() {
- return this._instance.script();
- }
- }, {
- key: "scriptURL",
- value: function scriptURL() {
- return pathToFileURL(this.scriptPath());
- }
- }]);
-
- return InspectorSession;
-}();
-
-var NodeInstance =
-/*#__PURE__*/
-function (_EventEmitter) {
- _inherits(NodeInstance, _EventEmitter);
-
- function NodeInstance() {
- var _this7;
-
- var inspectorFlags = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['--inspect-brk=0'];
- var scriptContents = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
- var scriptFile = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : _MAINSCRIPT;
-
- _classCallCheck(this, NodeInstance);
-
- _this7 = _possibleConstructorReturn(this, _getPrototypeOf(NodeInstance).call(this));
- _this7._scriptPath = scriptFile;
- _this7._script = scriptFile ? null : scriptContents;
- _this7._portCallback = null;
- _this7.portPromise = new Promise(function (resolve) {
- return _this7._portCallback = resolve;
- });
- _this7._process = spawnChildProcess(inspectorFlags, scriptContents, scriptFile);
- _this7._running = true;
- _this7._stderrLineCallback = null;
- _this7._unprocessedStderrLines = [];
-
- _this7._process.stdout.on('data', makeBufferingDataCallback(function (line) {
- _this7.emit('stdout', line);
-
- console.log('[out]', line);
- }));
-
- _this7._process.stderr.on('data', makeBufferingDataCallback(function (message) {
- return _this7.onStderrLine(message);
- }));
-
- _this7._shutdownPromise = new Promise(function (resolve) {
- _this7._process.once('exit', function (exitCode, signal) {
- resolve({
- exitCode: exitCode,
- signal: signal
- });
- _this7._running = false;
- });
- });
- return _this7;
- }
-
- _createClass(NodeInstance, [{
- key: "onStderrLine",
- value: function onStderrLine(line) {
- console.log('[err]', line);
-
- if (this._portCallback) {
- var matches = line.match(/Debugger listening on ws:\/\/.+:(\d+)\/.+/);
-
- if (matches) {
- this._portCallback(matches[1]);
-
- this._portCallback = null;
- }
- }
-
- if (this._stderrLineCallback) {
- this._stderrLineCallback(line);
-
- this._stderrLineCallback = null;
- } else {
- this._unprocessedStderrLines.push(line);
- }
- }
- }, {
- key: "httpGet",
- value: function httpGet(host, path, hostHeaderValue) {
- console.log('[test]', "Testing ".concat(path));
- var headers = hostHeaderValue ? {
- 'Host': hostHeaderValue
- } : null;
- return this.portPromise.then(function (port) {
- return new Promise(function (resolve, reject) {
- var req = http.get({
- host: host,
- port: port,
- path: path,
- headers: headers
- }, function (res) {
- var response = '';
- res.setEncoding('utf8');
- res.on('data', function (data) {
- return response += data.toString();
- }).on('end', function () {
- resolve(response);
- });
- });
- req.on('error', reject);
- });
- }).then(function (response) {
- try {
- return JSON.parse(response);
- } catch (e) {
- e.body = response;
- throw e;
- }
- });
- }
- }, {
- key: "sendUpgradeRequest",
- value: function () {
- var _sendUpgradeRequest = _asyncToGenerator(function* () {
- var response = yield this.httpGet(null, '/json/list');
- var devtoolsUrl = response[0].webSocketDebuggerUrl;
- var port = yield this.portPromise;
- return http.get({
- port: port,
- path: parseURL(devtoolsUrl).path,
- headers: {
- 'Connection': 'Upgrade',
- 'Upgrade': 'websocket',
- 'Sec-WebSocket-Version': 13,
- 'Sec-WebSocket-Key': 'key=='
- }
- });
- });
-
- function sendUpgradeRequest() {
- return _sendUpgradeRequest.apply(this, arguments);
- }
-
- return sendUpgradeRequest;
- }()
- }, {
- key: "connectInspectorSession",
- value: function () {
- var _connectInspectorSession = _asyncToGenerator(function* () {
- var _this8 = this;
-
- console.log('[test]', 'Connecting to a child Node process');
- var upgradeRequest = yield this.sendUpgradeRequest();
- return new Promise(function (resolve) {
- upgradeRequest.on('upgrade', function (message, socket) {
- return resolve(new InspectorSession(socket, _this8));
- }).on('response', common.mustNotCall('Upgrade was not received'));
- });
- });
-
- function connectInspectorSession() {
- return _connectInspectorSession.apply(this, arguments);
- }
-
- return connectInspectorSession;
- }()
- }, {
- key: "expectConnectionDeclined",
- value: function () {
- var _expectConnectionDeclined = _asyncToGenerator(function* () {
- console.log('[test]', 'Checking upgrade is not possible');
- var upgradeRequest = yield this.sendUpgradeRequest();
- return new Promise(function (resolve) {
- upgradeRequest.on('upgrade', common.mustNotCall('Upgrade was received')).on('response', function (response) {
- return response.on('data', function () {}).on('end', function () {
- return resolve(response.statusCode);
- });
- });
- });
- });
-
- function expectConnectionDeclined() {
- return _expectConnectionDeclined.apply(this, arguments);
- }
-
- return expectConnectionDeclined;
- }()
- }, {
- key: "expectShutdown",
- value: function expectShutdown() {
- return this._shutdownPromise;
- }
- }, {
- key: "nextStderrString",
- value: function nextStderrString() {
- var _this9 = this;
-
- if (this._unprocessedStderrLines.length) return Promise.resolve(this._unprocessedStderrLines.shift());
- return new Promise(function (resolve) {
- return _this9._stderrLineCallback = resolve;
- });
- }
- }, {
- key: "write",
- value: function write(message) {
- this._process.stdin.write(message);
- }
- }, {
- key: "kill",
- value: function kill() {
- this._process.kill();
-
- return this.expectShutdown();
- }
- }, {
- key: "scriptPath",
- value: function scriptPath() {
- return this._scriptPath;
- }
- }, {
- key: "script",
- value: function script() {
- if (this._script === null) this._script = fs.readFileSync(this.scriptPath(), 'utf8');
- return this._script;
- }
- }], [{
- key: "startViaSignal",
- value: function () {
- var _startViaSignal = _asyncToGenerator(function* (scriptContents) {
- var instance = new NodeInstance([], "".concat(scriptContents, "\nprocess._rawDebug('started');"), undefined);
- var msg = 'Timed out waiting for process to start';
-
- while ((yield fires(instance.nextStderrString(), msg, TIMEOUT)) !== 'started') {}
-
- process._debugProcess(instance._process.pid);
-
- return instance;
- });
-
- function startViaSignal(_x2) {
- return _startViaSignal.apply(this, arguments);
- }
-
- return startViaSignal;
- }()
- }]);
-
- return NodeInstance;
-}(EventEmitter);
-
-function onResolvedOrRejected(promise, callback) {
- return promise.then(function (result) {
- callback();
- return result;
- }, function (error) {
- callback();
- throw error;
- });
-}
-
-function timeoutPromise(error, timeoutMs) {
- var clearCallback = null;
- var done = false;
- var promise = onResolvedOrRejected(new Promise(function (resolve, reject) {
- var timeout = setTimeout(function () {
- return reject(error);
- }, timeoutMs);
-
- clearCallback = function clearCallback() {
- if (done) return;
- clearTimeout(timeout);
- resolve();
- };
- }), function () {
- return done = true;
- });
- promise.clear = clearCallback;
- return promise;
-} // Returns a new promise that will propagate `promise` resolution or rejection
-// if that happens within the `timeoutMs` timespan, or rejects with `error` as
-// a reason otherwise.
-
-
-function fires(promise, error, timeoutMs) {
- var timeout = timeoutPromise(error, timeoutMs);
- return Promise.race([onResolvedOrRejected(promise, function () {
- return timeout.clear();
- }), timeout]);
-}
-
-module.exports = {
- NodeInstance: NodeInstance
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/internet.js b/test/common/internet.js
deleted file mode 100644
index e34dabbe21..0000000000
--- a/test/common/internet.js
+++ /dev/null
@@ -1,107 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-// Utilities for internet-related tests
-
-
-var addresses = {
- // A generic host that has registered common DNS records,
- // supports both IPv4 and IPv6, and provides basic HTTP/HTTPS services
- INET_HOST: 'nodejs.org',
- // A host that provides IPv4 services
- INET4_HOST: 'nodejs.org',
- // A host that provides IPv6 services
- INET6_HOST: 'nodejs.org',
- // An accessible IPv4 IP,
- // defaults to the Google Public DNS IPv4 address
- INET4_IP: '8.8.8.8',
- // An accessible IPv6 IP,
- // defaults to the Google Public DNS IPv6 address
- INET6_IP: '2001:4860:4860::8888',
- // An invalid host that cannot be resolved
- // See https://tools.ietf.org/html/rfc2606#section-2
- INVALID_HOST: 'something.invalid',
- // A host with MX records registered
- MX_HOST: 'nodejs.org',
- // A host with SRV records registered
- SRV_HOST: '_jabber._tcp.google.com',
- // A host with PTR records registered
- PTR_HOST: '8.8.8.8.in-addr.arpa',
- // A host with NAPTR records registered
- NAPTR_HOST: 'sip2sip.info',
- // A host with SOA records registered
- SOA_HOST: 'nodejs.org',
- // A host with CNAME records registered
- CNAME_HOST: 'blog.nodejs.org',
- // A host with NS records registered
- NS_HOST: 'nodejs.org',
- // A host with TXT records registered
- TXT_HOST: 'nodejs.org',
- // An accessible IPv4 DNS server
- DNS4_SERVER: '8.8.8.8',
- // An accessible IPv4 DNS server
- DNS6_SERVER: '2001:4860:4860::8888'
-};
-var _iteratorNormalCompletion = true;
-var _didIteratorError = false;
-var _iteratorError = undefined;
-
-try {
- for (var _iterator = objectKeys(addresses)[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var key = _step.value;
- var envName = "NODE_TEST_".concat(key);
-
- if (process.env[envName]) {
- addresses[key] = process.env[envName];
- }
- }
-} catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
-} finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
-}
-
-module.exports = {
- addresses: addresses
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/ongc.js b/test/common/ongc.js
deleted file mode 100644
index c8ec0f07bc..0000000000
--- a/test/common/ongc.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert');
-
-var gcTrackerMap = new WeakMap();
-var gcTrackerTag = 'NODE_TEST_COMMON_GC_TRACKER';
-
-function onGC(obj, gcListener) {
- var async_hooks =
- /*require('async_hooks');
- const onGcAsyncHook = async_hooks.createHook({
- init: common.mustCallAtLeast(function(id, type) {
- if (this.trackedId === undefined) {
- assert.strictEqual(type, gcTrackerTag);
- this.trackedId = id;
- }
- }),
- destroy(id) {
- assert.notStrictEqual(this.trackedId, -1);
- if (id === this.trackedId) {
- this.gcListener.ongc();
- onGcAsyncHook.disable();
- }
- }
- }).enable();*/
- onGcAsyncHook.gcListener = gcListener;
- gcTrackerMap.set(obj, new async_hooks.AsyncResource(gcTrackerTag));
- obj = null;
-}
-
-module.exports = onGC;
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/shared-lib-util.js b/test/common/shared-lib-util.js
deleted file mode 100644
index fc96fc647a..0000000000
--- a/test/common/shared-lib-util.js
+++ /dev/null
@@ -1,80 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var common = require('../common');
-
-var path = require('path');
-
-var kNodeShared = Boolean(process.config.variables.node_shared);
-var kShlibSuffix = process.config.variables.shlib_suffix;
-var kExecPath = path.dirname(process.execPath); // If node executable is linked to shared lib, need to take care about the
-// shared lib path.
-
-function addLibraryPath(env) {
- if (!kNodeShared) {
- return;
- }
-
- env = env || process.env;
- env.LD_LIBRARY_PATH = (env.LD_LIBRARY_PATH ? env.LD_LIBRARY_PATH + path.delimiter : '') + path.join(kExecPath, 'lib.target'); // For AIX.
-
- env.LIBPATH = (env.LIBPATH ? env.LIBPATH + path.delimiter : '') + path.join(kExecPath, 'lib.target'); // For Mac OSX.
-
- env.DYLD_LIBRARY_PATH = (env.DYLD_LIBRARY_PATH ? env.DYLD_LIBRARY_PATH + path.delimiter : '') + kExecPath; // For Windows.
-
- env.PATH = (env.PATH ? env.PATH + path.delimiter : '') + kExecPath;
-} // Get the full path of shared lib.
-
-
-function getSharedLibPath() {
- if (common.isWindows) {
- return path.join(kExecPath, 'node.dll');
- } else if (common.isOSX) {
- return path.join(kExecPath, "libnode.".concat(kShlibSuffix));
- } else {
- return path.join(kExecPath, 'lib.target', "libnode.".concat(kShlibSuffix));
- }
-} // Get the binary path of stack frames.
-
-
-function getBinaryPath() {
- return kNodeShared ? getSharedLibPath() : process.execPath;
-}
-
-module.exports = {
- addLibraryPath: addLibraryPath,
- getBinaryPath: getBinaryPath,
- getSharedLibPath: getSharedLibPath
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/tick.js b/test/common/tick.js
deleted file mode 100644
index f4f5fb772d..0000000000
--- a/test/common/tick.js
+++ /dev/null
@@ -1,48 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-require('../common');
-
-module.exports = function tick(x, cb) {
- function ontick() {
- if (--x === 0) {
- if (typeof cb === 'function') cb();
- } else {
- setImmediate(ontick);
- }
- }
-
- setImmediate(ontick);
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/tls.js b/test/common/tls.js
deleted file mode 100644
index 98b00d0e09..0000000000
--- a/test/common/tls.js
+++ /dev/null
@@ -1,244 +0,0 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules, node-core/crypto-check */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var crypto = require('crypto');
-
-var net = require('net');
-
-exports.ccs = Buffer.from('140303000101', 'hex');
-
-var TestTLSSocket =
-/*#__PURE__*/
-function (_net$Socket) {
- _inherits(TestTLSSocket, _net$Socket);
-
- function TestTLSSocket(server_cert) {
- var _this;
-
- _classCallCheck(this, TestTLSSocket);
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestTLSSocket).call(this));
- _this.server_cert = server_cert;
- _this.version = Buffer.from('0303', 'hex');
- _this.handshake_list = []; // AES128-GCM-SHA256
-
- _this.ciphers = Buffer.from('000002009c0', 'hex');
- _this.pre_master_secret = Buffer.concat([_this.version, crypto.randomBytes(46)]);
- _this.master_secret = null;
- _this.write_seq = 0;
- _this.client_random = crypto.randomBytes(32);
-
- _this.on('handshake', function (msg) {
- _this.handshake_list.push(msg);
- });
-
- _this.on('server_random', function (server_random) {
- _this.master_secret = PRF12('sha256', _this.pre_master_secret, 'master secret', Buffer.concat([_this.client_random, server_random]), 48);
- var key_block = PRF12('sha256', _this.master_secret, 'key expansion', Buffer.concat([server_random, _this.client_random]), 40);
- _this.client_writeKey = key_block.slice(0, 16);
- _this.client_writeIV = key_block.slice(32, 36);
- });
-
- return _this;
- }
-
- _createClass(TestTLSSocket, [{
- key: "createClientHello",
- value: function createClientHello() {
- var compressions = Buffer.from('0100', 'hex'); // null
-
- var msg = addHandshakeHeader(0x01, Buffer.concat([this.version, this.client_random, this.ciphers, compressions]));
- this.emit('handshake', msg);
- return addRecordHeader(0x16, msg);
- }
- }, {
- key: "createClientKeyExchange",
- value: function createClientKeyExchange() {
- var encrypted_pre_master_secret = crypto.publicEncrypt({
- key: this.server_cert,
- padding: crypto.constants.RSA_PKCS1_PADDING
- }, this.pre_master_secret);
- var length = Buffer.alloc(2);
- length.writeUIntBE(encrypted_pre_master_secret.length, 0, 2);
- var msg = addHandshakeHeader(0x10, Buffer.concat([length, encrypted_pre_master_secret]));
- this.emit('handshake', msg);
- return addRecordHeader(0x16, msg);
- }
- }, {
- key: "createFinished",
- value: function createFinished() {
- var shasum = crypto.createHash('sha256');
- shasum.update(Buffer.concat(this.handshake_list));
- var message_hash = shasum.digest();
- var r = PRF12('sha256', this.master_secret, 'client finished', message_hash, 12);
- var msg = addHandshakeHeader(0x14, r);
- this.emit('handshake', msg);
- return addRecordHeader(0x16, msg);
- }
- }, {
- key: "createIllegalHandshake",
- value: function createIllegalHandshake() {
- var illegal_handshake = Buffer.alloc(5);
- return addRecordHeader(0x16, illegal_handshake);
- }
- }, {
- key: "parseTLSFrame",
- value: function parseTLSFrame(buf) {
- var offset = 0;
- var record = buf.slice(offset, 5);
- var type = record[0];
- var length = record.slice(3, 5).readUInt16BE(0);
- offset += 5;
- var remaining = buf.slice(offset, offset + length);
-
- if (type === 0x16) {
- do {
- remaining = this.parseTLSHandshake(remaining);
- } while (remaining.length > 0);
- }
-
- offset += length;
- return buf.slice(offset);
- }
- }, {
- key: "parseTLSHandshake",
- value: function parseTLSHandshake(buf) {
- var offset = 0;
- var handshake_type = buf[offset];
-
- if (handshake_type === 0x02) {
- var server_random = buf.slice(6, 6 + 32);
- this.emit('server_random', server_random);
- }
-
- offset += 1;
- var length = buf.readUIntBE(offset, 3);
- offset += 3;
- var handshake = buf.slice(0, offset + length);
- this.emit('handshake', handshake);
- offset += length;
- var remaining = buf.slice(offset);
- return remaining;
- }
- }, {
- key: "encrypt",
- value: function encrypt(plain) {
- var type = plain.slice(0, 1);
- var version = plain.slice(1, 3);
- var nonce = crypto.randomBytes(8);
- var iv = Buffer.concat([this.client_writeIV.slice(0, 4), nonce]);
- var bob = crypto.createCipheriv('aes-128-gcm', this.client_writeKey, iv);
- var write_seq = Buffer.alloc(8);
- write_seq.writeUInt32BE(this.write_seq++, 4);
- var aad = Buffer.concat([write_seq, plain.slice(0, 5)]);
- bob.setAAD(aad);
- var encrypted1 = bob.update(plain.slice(5));
- var encrypted = Buffer.concat([encrypted1, bob.final()]);
- var tag = bob.getAuthTag();
- var length = Buffer.alloc(2);
- length.writeUInt16BE(nonce.length + encrypted.length + tag.length, 0);
- return Buffer.concat([type, version, length, nonce, encrypted, tag]);
- }
- }]);
-
- return TestTLSSocket;
-}(net.Socket);
-
-function addRecordHeader(type, frame) {
- var record_layer = Buffer.from('0003030000', 'hex');
- record_layer[0] = type;
- record_layer.writeUInt16BE(frame.length, 3);
- return Buffer.concat([record_layer, frame]);
-}
-
-function addHandshakeHeader(type, msg) {
- var handshake_header = Buffer.alloc(4);
- handshake_header[0] = type;
- handshake_header.writeUIntBE(msg.length, 1, 3);
- return Buffer.concat([handshake_header, msg]);
-}
-
-function PRF12(algo, secret, label, seed, size) {
- var newSeed = Buffer.concat([Buffer.from(label, 'utf8'), seed]);
- return P_hash(algo, secret, newSeed, size);
-}
-
-function P_hash(algo, secret, seed, size) {
- var result = Buffer.alloc(size);
- var hmac = crypto.createHmac(algo, secret);
- hmac.update(seed);
- var a = hmac.digest();
- var j = 0;
-
- while (j < size) {
- hmac = crypto.createHmac(algo, secret);
- hmac.update(a);
- hmac.update(seed);
- var b = hmac.digest();
- var todo = b.length;
-
- if (j + todo > size) {
- todo = size - j;
- }
-
- b.copy(result, j, 0, todo);
- j += todo;
- hmac = crypto.createHmac(algo, secret);
- hmac.update(a);
- a = hmac.digest();
- }
-
- return result;
-}
-
-exports.TestTLSSocket = TestTLSSocket;
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/common/tmpdir.js b/test/common/tmpdir.js
index 4f3091de7c..4e29aa9691 100644
--- a/test/common/tmpdir.js
+++ b/test/common/tmpdir.js
@@ -1,101 +1,61 @@
-"use strict";
+'use strict'
-/**/
-require('@babel/polyfill');
+const fs = require('fs')
-var util = require('util');
+const path = require('path')
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
+const { isMainThread } = require('worker_threads')
+function rmSync(pathname) {
+ fs.rmSync(pathname, {
+ maxRetries: 3,
+ recursive: true,
+ force: true
+ })
+}
-var fs = require('fs');
+const testRoot = process.env.NODE_TEST_DIR ? fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..') // Using a `.` prefixed name, which is the convention for "hidden" on POSIX,
+// gets tools to ignore it by default or by simple rules, especially eslint.
-var path = require('path');
+const tmpdirName = '.tmp.' + (process.env.TEST_SERIAL_ID || process.env.TEST_THREAD_ID || '0')
+const tmpPath = path.join(testRoot, tmpdirName)
+let firstRefresh = true
-function rimrafSync(p) {
- var st;
+function refresh() {
+ rmSync(this.path)
+ fs.mkdirSync(this.path)
- try {
- st = fs.lstatSync(p);
- } catch (e) {
- if (e.code === 'ENOENT') return;
- }
+ if (firstRefresh) {
+ firstRefresh = false // Clean only when a test uses refresh. This allows for child processes to
+ // use the tmpdir and only the parent will clean on exit.
- try {
- if (st && st.isDirectory()) rmdirSync(p, null);else fs.unlinkSync(p);
- } catch (e) {
- if (e.code === 'ENOENT') return;
- if (e.code === 'EPERM') return rmdirSync(p, e);
- if (e.code !== 'EISDIR') throw e;
- rmdirSync(p, e);
+ process.on('exit', onexit)
}
}
-function rmdirSync(p, originalEr) {
+function onexit() {
+ // Change directory to avoid possible EBUSY
+ if (isMainThread) process.chdir(testRoot)
+
try {
- fs.rmdirSync(p);
+ rmSync(tmpPath)
} catch (e) {
- if (e.code === 'ENOTDIR') throw originalEr;
-
- if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST' || e.code === 'EPERM') {
- var enc = process.platform === 'linux' ? 'buffer' : 'utf8';
- forEach(fs.readdirSync(p, enc), function (f) {
- if (f instanceof Buffer) {
- var buf = Buffer.concat([Buffer.from(p), Buffer.from(path.sep), f]);
- rimrafSync(buf);
- } else {
- rimrafSync(path.join(p, f));
- }
- });
- fs.rmdirSync(p);
+ console.error("Can't clean tmpdir:", tmpPath)
+ const files = fs.readdirSync(tmpPath)
+ console.error('Files blocking:', files)
+
+ if (files.some((f) => f.startsWith('.nfs'))) {
+ // Warn about NFS "silly rename"
+ console.error('Note: ".nfs*" might be files that were open and ' + 'unlinked but not closed.')
+ console.error('See http://nfs.sourceforge.net/#faq_d2 for details.')
}
- }
-}
-
-var testRoot = process.env.NODE_TEST_DIR ? fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..'); // Using a `.` prefixed name, which is the convention for "hidden" on POSIX,
-// gets tools to ignore it by default or by simple rules, especially eslint.
-
-var tmpdirName = '.tmp';
-if (process.env.TEST_THREAD_ID) {
- tmpdirName += ".".concat(process.env.TEST_THREAD_ID);
-}
-
-var tmpPath = path.join(testRoot, tmpdirName);
-
-function refresh() {
- rimrafSync(this.path);
- fs.mkdirSync(this.path);
+ console.error()
+ throw e
+ }
}
module.exports = {
path: tmpPath,
- refresh: refresh
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
+ refresh
+}
diff --git a/test/common/wpt.js b/test/common/wpt.js
deleted file mode 100644
index 0d1b9c8008..0000000000
--- a/test/common/wpt.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-
-/**/
-require('@babel/polyfill');
-
-var util = require('util');
-
-for (var i in util) {
- exports[i] = util[i];
-}
-/**/
-
-/* eslint-disable node-core/required-modules */
-
-
-'use strict';
-/**/
-
-
-var objectKeys = objectKeys || function (obj) {
- var keys = [];
-
- for (var key in obj) {
- keys.push(key);
- }
-
- return keys;
-};
-/**/
-
-
-var assert = require('assert'); // https://github.com/w3c/testharness.js/blob/master/testharness.js
-
-
-module.exports = {
- test: function test(fn, desc) {
- try {
- fn();
- } catch (err) {
- console.error("In ".concat(desc, ":"));
- throw err;
- }
- },
- assert_equals: assert.strictEqual,
- assert_true: function assert_true(value, message) {
- return assert.strictEqual(value, true, message);
- },
- assert_false: function assert_false(value, message) {
- return assert.strictEqual(value, false, message);
- },
- assert_throws: function assert_throws(code, func, desc) {
- assert.throws(func, function (err) {
- return typeof err === 'object' && 'name' in err && err.name.startsWith(code.name);
- }, desc);
- },
- assert_array_equals: assert.deepStrictEqual,
- assert_unreached: function assert_unreached(desc) {
- assert.fail("Reached unreachable code: ".concat(desc));
- }
-};
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
-}
\ No newline at end of file
diff --git a/test/fixtures/elipses.txt b/test/fixtures/elipses.txt
new file mode 100644
index 0000000000..6105600505
--- /dev/null
+++ b/test/fixtures/elipses.txt
@@ -0,0 +1 @@
+…………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
…
\ No newline at end of file
diff --git a/test/fixtures/empty-with-bom.txt b/test/fixtures/empty-with-bom.txt
new file mode 100644
index 0000000000..5f282702bb
--- /dev/null
+++ b/test/fixtures/empty-with-bom.txt
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/test/fixtures/empty.txt b/test/fixtures/empty.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/test/fixtures/file-to-read-with-bom.txt b/test/fixtures/file-to-read-with-bom.txt
new file mode 100644
index 0000000000..d46c8708d9
--- /dev/null
+++ b/test/fixtures/file-to-read-with-bom.txt
@@ -0,0 +1,3 @@
+abc
+def
+ghi
diff --git a/test/fixtures/file-to-read-without-bom.txt b/test/fixtures/file-to-read-without-bom.txt
new file mode 100644
index 0000000000..8edb37e36d
--- /dev/null
+++ b/test/fixtures/file-to-read-without-bom.txt
@@ -0,0 +1,3 @@
+abc
+def
+ghi
diff --git a/test/fixtures/outside.txt b/test/fixtures/outside.txt
new file mode 100644
index 0000000000..044c4b9614
--- /dev/null
+++ b/test/fixtures/outside.txt
@@ -0,0 +1,2 @@
+this file is part of the WASI tests. it exists outside of the sandbox, and
+should be inaccessible from the WASI tests.
diff --git a/test/fixtures/readfile_pipe_test.txt b/test/fixtures/readfile_pipe_test.txt
new file mode 100644
index 0000000000..65975655dc
--- /dev/null
+++ b/test/fixtures/readfile_pipe_test.txt
@@ -0,0 +1,5 @@
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
diff --git a/test/fixtures/tls-session-ticket.txt b/test/fixtures/tls-session-ticket.txt
new file mode 100644
index 0000000000..bc0f6b58e1
--- /dev/null
+++ b/test/fixtures/tls-session-ticket.txt
@@ -0,0 +1,23 @@
+-----BEGIN SSL SESSION PARAMETERS-----
+MIID2wIBAQICAwEEAgA1BCAMjLe+70uBSPGvybkTnPVUMwdbdtVbkMIXf8L5M8Kl
+VAQwog+Afs00cnYUcgD1BQewJyxX1e561oRuDTpy7BHABC1hC7hxTaul+pwv+cBx
+8D72oQYCBFFQF3OiBAICASyjggNhMIIDXTCCAkWgAwIBAgIJAMUSOvlaeyQHMA0G
+CSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRl
+MSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTAxMTE2MDkz
+MjQ5WhcNMTMxMTE1MDkzMjQ5WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29t
+ZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+LXZOjcQCJq3+ZKUFabj71oo/ex
+/XsBcFqtBThjjTw9CVEVwfPQQp4XwtPiB204vnYXwQ1/R2NdTQqCZu47l79LssL/
+u2a5Y9+0NEU3nQA5qdt+1FAE0c5oexPimXOrR3GWfKz7PmZ2O0117IeCUUXPG5U8
+umhDe/4mDF4ZNJiKc404WthquTqgS7rLQZHhZ6D0EnGnOkzlmxJMYPNHSOY1/6iv
+dNUUcC87awNEA3lgfhy25IyBK3QJc+aYKNTbt70Lery3bu2wWLFGtmNiGlQTS4Js
+xImRsECTI727ObS7/FWAQsqW+COL0Sa5BuMFrFIpjPrEe0ih7vRRbdmXRwIDAQAB
+o1AwTjAdBgNVHQ4EFgQUDnV4d6mDtOnluLoCjkUHTX/n4agwHwYDVR0jBBgwFoAU
+DnV4d6mDtOnluLoCjkUHTX/n4agwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUF
+AAOCAQEAFwV4MQfTo+qMv9JMiynoIEiqfOz4RgtmBqRnXUffcjS2dhc7/z+FPZnM
+79Kej8eLHoVfxCyWRHFlzm93vEdvwxOCrD13EDOi08OOZfxWyIlCa6Bg8cMAKqQz
+d2OvQOWqlRWBTThBJIhWflU33izXQn5GdmYqhfpc+9ZHHGhvXNydtRQkdxVK2dZN
+zLBvBlLlRmtoClU7xm3A+/5dddePAQHEPtyFlUw49VYtZ3ru6KqPms7MKvcRhYLs
+y9rwSfuuniMlx4d0bDR7TOkw0QQSA0N8MGQRQpzl4mw4jLzyM5d5QtuGBh2P6hPG
+a0YQxtI3RPT/p6ENzzBiAKXiSfzox6QCBAClAwIBEg==
+-----END SSL SESSION PARAMETERS-----
diff --git a/test/fixtures/x.txt b/test/fixtures/x.txt
new file mode 100644
index 0000000000..cd470e6190
--- /dev/null
+++ b/test/fixtures/x.txt
@@ -0,0 +1 @@
+xyz
diff --git a/test/ours/lolex-fake-timers.js b/test/ours/lolex-fake-timers.js
deleted file mode 100644
index 59af1328ea..0000000000
--- a/test/ours/lolex-fake-timers.js
+++ /dev/null
@@ -1,41 +0,0 @@
-require('../common');
-var tap = require('tap');
-var util = require('util');
-var assert = require('assert');
-var lolex = require('lolex');
-var stream = require('../../');
-var Transform = stream.Transform;
-
-function MyTransform() {
- Transform.call(this);
-}
-
-util.inherits(MyTransform, Transform);
-
-var clock = lolex.install({toFake: [ 'setImmediate', 'nextTick' ]});
-var stream2DataCalled = false;
-
-var stream = new MyTransform();
-stream.on('data', function() {
- stream.on('end', function() {
-
- var stream2 = new MyTransform();
- stream2.on('data', function() {
- stream2.on('end', function() {
- stream2DataCalled = true
- });
- setImmediate(function() {
- stream2.end()
- });
- });
- stream2.emit('data')
-
- });
- stream.end();
-});
-stream.emit('data');
-
-clock.runAll()
-clock.uninstall();
-assert(stream2DataCalled);
-tap.pass('ok');
diff --git a/test/ours/test-errors.js b/test/ours/test-errors.js
new file mode 100644
index 0000000000..84a34a574d
--- /dev/null
+++ b/test/ours/test-errors.js
@@ -0,0 +1,115 @@
+'use strict'
+
+const t = require('tap')
+
+const { codes: errors } = require('../../lib/ours/errors')
+
+function checkError(err, Base, name, code, message) {
+ t.ok(err instanceof Base)
+ t.equal(err.name, name)
+ t.equal(err.code, code)
+ t.equal(err.message, message)
+} // Update this numbers based on the number of checkError below multiplied by the assertions within checkError
+
+t.plan(17 * 4)
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', 0),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received 0"
+)
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', undefined),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received undefined"
+)
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)'
+)
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ "The first argument must be not string. Received type string ('foo')"
+)
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ 'The "obj.prop" property must be of type string. Received undefined'
+)
+checkError(
+ new errors.ERR_STREAM_PUSH_AFTER_EOF(),
+ Error,
+ 'Error',
+ 'ERR_STREAM_PUSH_AFTER_EOF',
+ 'stream.push() after EOF'
+)
+checkError(
+ new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'),
+ Error,
+ 'Error',
+ 'ERR_METHOD_NOT_IMPLEMENTED',
+ 'The _read() method is not implemented'
+)
+checkError(
+ new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'),
+ Error,
+ 'Error',
+ 'ERR_METHOD_NOT_IMPLEMENTED',
+ 'The _write() method is not implemented'
+)
+checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close')
+checkError(
+ new errors.ERR_STREAM_DESTROYED('pipe'),
+ Error,
+ 'Error',
+ 'ERR_STREAM_DESTROYED',
+ 'Cannot call pipe after a stream was destroyed'
+)
+checkError(
+ new errors.ERR_STREAM_DESTROYED('write'),
+ Error,
+ 'Error',
+ 'ERR_STREAM_DESTROYED',
+ 'Cannot call write after a stream was destroyed'
+)
+checkError(
+ new errors.ERR_MULTIPLE_CALLBACK(),
+ Error,
+ 'Error',
+ 'ERR_MULTIPLE_CALLBACK',
+ 'Callback called multiple times'
+)
+checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable')
+checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end')
+checkError(
+ new errors.ERR_STREAM_NULL_VALUES(),
+ TypeError,
+ 'TypeError',
+ 'ERR_STREAM_NULL_VALUES',
+ 'May not write null values to stream'
+)
+checkError(
+ new errors.ERR_UNKNOWN_ENCODING('foo'),
+ TypeError,
+ 'TypeError',
+ 'ERR_UNKNOWN_ENCODING',
+ 'Unknown encoding: foo'
+)
+checkError(
+ new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(),
+ Error,
+ 'Error',
+ 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT',
+ 'stream.unshift() after end event'
+)
diff --git a/test/ours/test-fake-timers.js b/test/ours/test-fake-timers.js
new file mode 100644
index 0000000000..b4cc966b1c
--- /dev/null
+++ b/test/ours/test-fake-timers.js
@@ -0,0 +1,43 @@
+'use strict'
+
+require('../common')
+
+const t = require('tap')
+
+const util = require('util')
+
+const fakeTimers = require('@sinonjs/fake-timers')
+
+const Transform = require('../../lib/ours/index').Transform
+
+t.plan(1)
+
+function MyTransform() {
+ Transform.call(this)
+}
+
+util.inherits(MyTransform, Transform)
+const clock = fakeTimers.install({
+ toFake: ['setImmediate', 'nextTick']
+})
+let stream2DataCalled = false
+const stream = new MyTransform()
+stream.on('data', function () {
+ stream.on('end', function () {
+ const stream2 = new MyTransform()
+ stream2.on('data', function () {
+ stream2.on('end', function () {
+ stream2DataCalled = true
+ })
+ setImmediate(function () {
+ stream2.end()
+ })
+ })
+ stream2.emit('data')
+ })
+ stream.end()
+})
+stream.emit('data')
+clock.runAll()
+clock.uninstall()
+t.ok(stream2DataCalled)
diff --git a/test/ours/test-stream-sync-write.js b/test/ours/test-stream-sync-write.js
index bfa7be9410..7fa0e46788 100644
--- a/test/ours/test-stream-sync-write.js
+++ b/test/ours/test-stream-sync-write.js
@@ -1,38 +1,49 @@
-require('../common');
-var util = require('util');
-var stream = require('../../');
-var WritableStream = stream.Writable;
+'use strict'
+require('../common')
-var InternalStream = function() {
- WritableStream.call(this);
-};
-util.inherits(InternalStream, WritableStream);
+const t = require('tap')
-InternalStream.prototype._write = function(chunk, encoding, callback) {
- callback();
-};
+const util = require('util')
-var internalStream = new InternalStream();
+const stream = require('../../lib/ours/index')
+const WritableStream = stream.Writable
+t.plan(1)
+const InternalStream = function () {
+ WritableStream.call(this)
+}
+
+util.inherits(InternalStream, WritableStream)
+let invocations = 0
+
+InternalStream.prototype._write = function (chunk, encoding, callback) {
+ callback()
+}
-var ExternalStream = function(writable) {
- this._writable = writable;
- WritableStream.call(this);
-};
-util.inherits(ExternalStream, WritableStream);
+const internalStream = new InternalStream()
-ExternalStream.prototype._write = function(chunk, encoding, callback) {
- this._writable.write(chunk, encoding, callback);
-};
+const ExternalStream = function (writable) {
+ this._writable = writable
+ WritableStream.call(this)
+}
+util.inherits(ExternalStream, WritableStream)
+ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ this._writable.write(chunk, encoding, callback)
+}
-var externalStream = new ExternalStream(internalStream);
+const externalStream = new ExternalStream(internalStream)
-for (var i = 0; i < 2000; i++) {
- externalStream.write(i.toString());
+for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString(), () => {
+ invocations++
+ })
}
-require('tap').pass('sync done');
+externalStream.end()
+externalStream.on('finish', () => {
+ t.equal(invocations, 2000)
+})
diff --git a/test/parallel/test-readable-from-iterator-closing.js b/test/parallel/test-readable-from-iterator-closing.js
new file mode 100644
index 0000000000..109ab7e34a
--- /dev/null
+++ b/test/parallel/test-readable-from-iterator-closing.js
@@ -0,0 +1,223 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const { mustCall, mustNotCall } = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const { strictEqual } = require('assert')
+
+async function asyncSupport() {
+ const finallyMustCall = mustCall()
+ const bodyMustCall = mustCall()
+
+ async function* infiniteGenerate() {
+ try {
+ while (true) yield 'a'
+ } finally {
+ finallyMustCall()
+ }
+ }
+
+ const stream = Readable.from(infiniteGenerate())
+
+ for await (const chunk of stream) {
+ bodyMustCall()
+ strictEqual(chunk, 'a')
+ break
+ }
+}
+
+async function syncSupport() {
+ const finallyMustCall = mustCall()
+ const bodyMustCall = mustCall()
+
+ function* infiniteGenerate() {
+ try {
+ while (true) yield 'a'
+ } finally {
+ finallyMustCall()
+ }
+ }
+
+ const stream = Readable.from(infiniteGenerate())
+
+ for await (const chunk of stream) {
+ bodyMustCall()
+ strictEqual(chunk, 'a')
+ break
+ }
+}
+
+async function syncPromiseSupport() {
+ const returnMustBeAwaited = mustCall()
+ const bodyMustCall = mustCall()
+
+ function* infiniteGenerate() {
+ try {
+ while (true) yield Promise.resolve('a')
+ } finally {
+ // eslint-disable-next-line no-unsafe-finally
+ return {
+ then(cb) {
+ returnMustBeAwaited()
+ cb()
+ }
+ }
+ }
+ }
+
+ const stream = Readable.from(infiniteGenerate())
+
+ for await (const chunk of stream) {
+ bodyMustCall()
+ strictEqual(chunk, 'a')
+ break
+ }
+}
+
+async function syncRejectedSupport() {
+ const returnMustBeAwaited = mustCall()
+ const bodyMustNotCall = mustNotCall()
+ const catchMustCall = mustCall()
+ const secondNextMustNotCall = mustNotCall()
+
+ function* generate() {
+ try {
+ yield Promise.reject('a')
+ secondNextMustNotCall()
+ } finally {
+ // eslint-disable-next-line no-unsafe-finally
+ return {
+ then(cb) {
+ returnMustBeAwaited()
+ cb()
+ }
+ }
+ }
+ }
+
+ const stream = Readable.from(generate())
+
+ try {
+ for await (const chunk of stream) {
+ bodyMustNotCall(chunk)
+ }
+ } catch {
+ catchMustCall()
+ }
+}
+
+async function noReturnAfterThrow() {
+ const returnMustNotCall = mustNotCall()
+ const bodyMustNotCall = mustNotCall()
+ const catchMustCall = mustCall()
+ const nextMustCall = mustCall()
+ const stream = Readable.from({
+ [Symbol.asyncIterator]() {
+ return this
+ },
+
+ async next() {
+ nextMustCall()
+ throw new Error('a')
+ },
+
+ async return() {
+ returnMustNotCall()
+ return {
+ done: true
+ }
+ }
+ })
+
+ try {
+ for await (const chunk of stream) {
+ bodyMustNotCall(chunk)
+ }
+ } catch {
+ catchMustCall()
+ }
+}
+
+async function closeStreamWhileNextIsPending() {
+ const finallyMustCall = mustCall()
+ const dataMustCall = mustCall()
+ let resolveDestroy
+ const destroyed = new Promise((resolve) => {
+ resolveDestroy = mustCall(resolve)
+ })
+ let resolveYielded
+ const yielded = new Promise((resolve) => {
+ resolveYielded = mustCall(resolve)
+ })
+
+ async function* infiniteGenerate() {
+ try {
+ while (true) {
+ yield 'a'
+ resolveYielded()
+ await destroyed
+ }
+ } finally {
+ finallyMustCall()
+ }
+ }
+
+ const stream = Readable.from(infiniteGenerate())
+ stream.on('data', (data) => {
+ dataMustCall()
+ strictEqual(data, 'a')
+ })
+ yielded.then(() => {
+ stream.destroy()
+ resolveDestroy()
+ })
+}
+
+async function closeAfterNullYielded() {
+ const finallyMustCall = mustCall()
+ const dataMustCall = mustCall(3)
+
+ function* generate() {
+ try {
+ yield 'a'
+ yield 'a'
+ yield 'a'
+ } finally {
+ finallyMustCall()
+ }
+ }
+
+ const stream = Readable.from(generate())
+ stream.on('data', (chunk) => {
+ dataMustCall()
+ strictEqual(chunk, 'a')
+ })
+}
+
+Promise.all([
+ asyncSupport(),
+ syncSupport(),
+ syncPromiseSupport(),
+ syncRejectedSupport(),
+ noReturnAfterThrow(),
+ closeStreamWhileNextIsPending(),
+ closeAfterNullYielded()
+]).then(mustCall())
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js
index 83e91f1615..60ce376ebc 100644
--- a/test/parallel/test-readable-from.js
+++ b/test/parallel/test-readable-from.js
@@ -1,398 +1,227 @@
-"use strict";
+'use strict'
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+const tap = require('tap')
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+const silentConsole = {
+ log() {},
-function _awaitAsyncGenerator(value) { return new _AwaitValue(value); }
-
-function _wrapAsyncGenerator(fn) { return function () { return new _AsyncGenerator(fn.apply(this, arguments)); }; }
-
-function _AsyncGenerator(gen) { var front, back; function send(key, arg) { return new Promise(function (resolve, reject) { var request = { key: key, arg: arg, resolve: resolve, reject: reject, next: null }; if (back) { back = back.next = request; } else { front = back = request; resume(key, arg); } }); } function resume(key, arg) { try { var result = gen[key](arg); var value = result.value; var wrappedAwait = value instanceof _AwaitValue; Promise.resolve(wrappedAwait ? value.wrapped : value).then(function (arg) { if (wrappedAwait) { resume(key === "return" ? "return" : "next", arg); return; } settle(result.done ? "return" : "normal", arg); }, function (err) { resume("throw", err); }); } catch (err) { settle("throw", err); } } function settle(type, value) { switch (type) { case "return": front.resolve({ value: value, done: true }); break; case "throw": front.reject(value); break; default: front.resolve({ value: value, done: false }); break; } front = front.next; if (front) { resume(front.key, front.arg); } else { back = null; } } this._invoke = send; if (typeof gen.return !== "function") { this.return = undefined; } }
-
-if (typeof Symbol === "function" && Symbol.asyncIterator) { _AsyncGenerator.prototype[Symbol.asyncIterator] = function () { return this; }; }
-
-_AsyncGenerator.prototype.next = function (arg) { return this._invoke("next", arg); };
-
-_AsyncGenerator.prototype.throw = function (arg) { return this._invoke("throw", arg); };
-
-_AsyncGenerator.prototype.return = function (arg) { return this._invoke("return", arg); };
-
-function _AwaitValue(value) { this.wrapped = value; }
-
-function _asyncIterator(iterable) { var method; if (typeof Symbol !== "undefined") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new TypeError("Object is not async iterable"); }
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var _require = require('../common'),
- mustCall = _require.mustCall;
-
-var once = require('events.once');
-
-var _require2 = require('../../'),
- Readable = _require2.Readable;
-
-var _require3 = require('assert/'),
- strictEqual = _require3.strictEqual;
-
-function toReadableBasicSupport() {
- return _toReadableBasicSupport.apply(this, arguments);
+ error() {}
}
+const { mustCall } = require('../common')
-function _toReadableBasicSupport() {
- _toReadableBasicSupport = _asyncToGenerator(function* () {
- function generate() {
- return _generate.apply(this, arguments);
- }
+const { once } = require('events')
- function _generate() {
- _generate = _wrapAsyncGenerator(function* () {
- yield 'a';
- yield 'b';
- yield 'c';
- });
- return _generate.apply(this, arguments);
- }
-
- var stream = Readable.from(generate());
- var expected = ['a', 'b', 'c'];
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
+const { Readable } = require('../../lib/ours/index')
- var _iteratorError;
+const { strictEqual, throws } = require('assert')
- try {
- for (var _iterator = _asyncIterator(stream), _step, _value; _step = yield _iterator.next(), _iteratorNormalCompletion = _step.done, _value = yield _step.value, !_iteratorNormalCompletion; _iteratorNormalCompletion = true) {
- var chunk = _value;
- strictEqual(chunk, expected.shift());
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- yield _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
- });
- return _toReadableBasicSupport.apply(this, arguments);
-}
+const common = require('../common')
-function toReadableSyncIterator() {
- return _toReadableSyncIterator.apply(this, arguments);
+{
+ throws(() => {
+ Readable.from(null)
+ }, /ERR_INVALID_ARG_TYPE/)
}
-function _toReadableSyncIterator() {
- _toReadableSyncIterator = _asyncToGenerator(function* () {
- function* generate() {
- yield 'a';
- yield 'b';
- yield 'c';
- }
-
- var stream = Readable.from(generate());
- var expected = ['a', 'b', 'c'];
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
+async function toReadableBasicSupport() {
+ async function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
- var _iteratorError2;
+ const stream = Readable.from(generate())
+ const expected = ['a', 'b', 'c']
- try {
- for (var _iterator2 = _asyncIterator(stream), _step2, _value2; _step2 = yield _iterator2.next(), _iteratorNormalCompletion2 = _step2.done, _value2 = yield _step2.value, !_iteratorNormalCompletion2; _iteratorNormalCompletion2 = true) {
- var chunk = _value2;
- strictEqual(chunk, expected.shift());
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- yield _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
- }
- });
- return _toReadableSyncIterator.apply(this, arguments);
-}
-
-function toReadablePromises() {
- return _toReadablePromises.apply(this, arguments);
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
}
-function _toReadablePromises() {
- _toReadablePromises = _asyncToGenerator(function* () {
- var promises = [Promise.resolve('a'), Promise.resolve('b'), Promise.resolve('c')];
- var stream = Readable.from(promises);
- var expected = ['a', 'b', 'c'];
- var _iteratorNormalCompletion3 = true;
- var _didIteratorError3 = false;
-
- var _iteratorError3;
+async function toReadableSyncIterator() {
+ function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
- try {
- for (var _iterator3 = _asyncIterator(stream), _step3, _value3; _step3 = yield _iterator3.next(), _iteratorNormalCompletion3 = _step3.done, _value3 = yield _step3.value, !_iteratorNormalCompletion3; _iteratorNormalCompletion3 = true) {
- var chunk = _value3;
- strictEqual(chunk, expected.shift());
- }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion3 && _iterator3.return != null) {
- yield _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
- }
- }
- }
- });
- return _toReadablePromises.apply(this, arguments);
-}
+ const stream = Readable.from(generate())
+ const expected = ['a', 'b', 'c']
-function toReadableString() {
- return _toReadableString.apply(this, arguments);
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
}
-function _toReadableString() {
- _toReadableString = _asyncToGenerator(function* () {
- var stream = Readable.from('abc');
- var expected = ['a', 'b', 'c'];
- var _iteratorNormalCompletion4 = true;
- var _didIteratorError4 = false;
+async function toReadablePromises() {
+ const promises = [Promise.resolve('a'), Promise.resolve('b'), Promise.resolve('c')]
+ const stream = Readable.from(promises)
+ const expected = ['a', 'b', 'c']
- var _iteratorError4;
-
- try {
- for (var _iterator4 = _asyncIterator(stream), _step4, _value4; _step4 = yield _iterator4.next(), _iteratorNormalCompletion4 = _step4.done, _value4 = yield _step4.value, !_iteratorNormalCompletion4; _iteratorNormalCompletion4 = true) {
- var chunk = _value4;
- strictEqual(chunk, expected.shift());
- }
- } catch (err) {
- _didIteratorError4 = true;
- _iteratorError4 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion4 && _iterator4.return != null) {
- yield _iterator4.return();
- }
- } finally {
- if (_didIteratorError4) {
- throw _iteratorError4;
- }
- }
- }
- });
- return _toReadableString.apply(this, arguments);
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
}
-function toReadableOnData() {
- return _toReadableOnData.apply(this, arguments);
-}
+async function toReadableString() {
+ const stream = Readable.from('abc')
+ const expected = ['abc']
-function _toReadableOnData() {
- _toReadableOnData = _asyncToGenerator(function* () {
- function generate() {
- return _generate2.apply(this, arguments);
- }
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+}
- function _generate2() {
- _generate2 = _wrapAsyncGenerator(function* () {
- yield 'a';
- yield 'b';
- yield 'c';
- });
- return _generate2.apply(this, arguments);
- }
+async function toReadableBuffer() {
+ const stream = Readable.from(Buffer.from('abc'))
+ const expected = ['abc']
- var stream = Readable.from(generate());
- var iterations = 0;
- var expected = ['a', 'b', 'c'];
- stream.on('data', function (chunk) {
- iterations++;
- strictEqual(chunk, expected.shift());
- });
- yield once(stream, 'end');
- strictEqual(iterations, 3);
- });
- return _toReadableOnData.apply(this, arguments);
+ for await (const chunk of stream) {
+ strictEqual(chunk.toString(), expected.shift())
+ }
}
-function toReadableOnDataNonObject() {
- return _toReadableOnDataNonObject.apply(this, arguments);
-}
+async function toReadableOnData() {
+ async function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
-function _toReadableOnDataNonObject() {
- _toReadableOnDataNonObject = _asyncToGenerator(function* () {
- function generate() {
- return _generate3.apply(this, arguments);
- }
+ const stream = Readable.from(generate())
+ let iterations = 0
+ const expected = ['a', 'b', 'c']
+ stream.on('data', (chunk) => {
+ iterations++
+ strictEqual(chunk, expected.shift())
+ })
+ await once(stream, 'end')
+ strictEqual(iterations, 3)
+}
- function _generate3() {
- _generate3 = _wrapAsyncGenerator(function* () {
- yield 'a';
- yield 'b';
- yield 'c';
- });
- return _generate3.apply(this, arguments);
- }
+async function toReadableOnDataNonObject() {
+ async function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
- var stream = Readable.from(generate(), {
- objectMode: false
- });
- var iterations = 0;
- var expected = ['a', 'b', 'c'];
- stream.on('data', function (chunk) {
- iterations++;
- strictEqual(chunk instanceof Buffer, true);
- strictEqual(chunk.toString(), expected.shift());
- });
- yield once(stream, 'end');
- strictEqual(iterations, 3);
- });
- return _toReadableOnDataNonObject.apply(this, arguments);
+ const stream = Readable.from(generate(), {
+ objectMode: false
+ })
+ let iterations = 0
+ const expected = ['a', 'b', 'c']
+ stream.on('data', (chunk) => {
+ iterations++
+ strictEqual(chunk instanceof Buffer, true)
+ strictEqual(chunk.toString(), expected.shift())
+ })
+ await once(stream, 'end')
+ strictEqual(iterations, 3)
}
-function destroysTheStreamWhenThrowing() {
- return _destroysTheStreamWhenThrowing.apply(this, arguments);
-}
+async function destroysTheStreamWhenThrowing() {
+ async function* generate() {
+ // eslint-disable-line require-yield
+ throw new Error('kaboom')
+ }
-function _destroysTheStreamWhenThrowing() {
- _destroysTheStreamWhenThrowing = _asyncToGenerator(function* () {
- function generate() {
- return _generate4.apply(this, arguments);
- }
+ const stream = Readable.from(generate())
+ stream.read()
+ const [err] = await once(stream, 'error')
+ strictEqual(err.message, 'kaboom')
+ strictEqual(stream.destroyed, true)
+}
- function _generate4() {
- _generate4 = _wrapAsyncGenerator(function* () {
- throw new Error('kaboom');
- });
- return _generate4.apply(this, arguments);
+async function asTransformStream() {
+ async function* generate(stream) {
+ for await (const chunk of stream) {
+ yield chunk.toUpperCase()
}
+ }
- var stream = Readable.from(generate());
- stream.read();
+ const source = new Readable({
+ objectMode: true,
- try {
- yield once(stream, 'error');
- } catch (err) {
- strictEqual(err.message, 'kaboom');
- strictEqual(stream.destroyed, true);
+ read() {
+ this.push('a')
+ this.push('b')
+ this.push('c')
+ this.push(null)
}
- });
- return _destroysTheStreamWhenThrowing.apply(this, arguments);
-}
+ })
+ const stream = Readable.from(generate(source))
+ const expected = ['A', 'B', 'C']
-function asTransformStream() {
- return _asTransformStream.apply(this, arguments);
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
}
-function _asTransformStream() {
- _asTransformStream = _asyncToGenerator(function* () {
- function generate(_x) {
- return _generate5.apply(this, arguments);
- }
-
- function _generate5() {
- _generate5 = _wrapAsyncGenerator(function* (stream) {
- var _iteratorNormalCompletion6 = true;
- var _didIteratorError6 = false;
-
- var _iteratorError6;
-
- try {
- for (var _iterator6 = _asyncIterator(stream), _step6, _value6; _step6 = yield _awaitAsyncGenerator(_iterator6.next()), _iteratorNormalCompletion6 = _step6.done, _value6 = yield _awaitAsyncGenerator(_step6.value), !_iteratorNormalCompletion6; _iteratorNormalCompletion6 = true) {
- var chunk = _value6;
- yield chunk.toUpperCase();
- }
- } catch (err) {
- _didIteratorError6 = true;
- _iteratorError6 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion6 && _iterator6.return != null) {
- yield _awaitAsyncGenerator(_iterator6.return());
- }
- } finally {
- if (_didIteratorError6) {
- throw _iteratorError6;
- }
- }
- }
- });
- return _generate5.apply(this, arguments);
- }
-
- var source = new Readable({
- objectMode: true,
- read: function read() {
- this.push('a');
- this.push('b');
- this.push('c');
- this.push(null);
- }
- });
- var stream = Readable.from(generate(source));
- var expected = ['A', 'B', 'C'];
- var _iteratorNormalCompletion5 = true;
- var _didIteratorError5 = false;
-
- var _iteratorError5;
-
+async function endWithError() {
+ async function* generate() {
+ yield 1
+ yield 2
+ yield Promise.reject('Boum')
+ }
+
+ const stream = Readable.from(generate())
+ const expected = [1, 2]
+
+ try {
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+
+ throw new Error()
+ } catch (err) {
+ strictEqual(expected.length, 0)
+ strictEqual(err, 'Boum')
+ }
+}
+
+async function destroyingStreamWithErrorThrowsInGenerator() {
+ const validateError = common.mustCall((e) => {
+ strictEqual(e, 'Boum')
+ })
+
+ async function* generate() {
try {
- for (var _iterator5 = _asyncIterator(stream), _step5, _value5; _step5 = yield _iterator5.next(), _iteratorNormalCompletion5 = _step5.done, _value5 = yield _step5.value, !_iteratorNormalCompletion5; _iteratorNormalCompletion5 = true) {
- var chunk = _value5;
- strictEqual(chunk, expected.shift());
- }
- } catch (err) {
- _didIteratorError5 = true;
- _iteratorError5 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion5 && _iterator5.return != null) {
- yield _iterator5.return();
- }
- } finally {
- if (_didIteratorError5) {
- throw _iteratorError5;
- }
- }
- }
- });
- return _asTransformStream.apply(this, arguments);
-}
-
-Promise.all([toReadableBasicSupport(), toReadableSyncIterator(), toReadablePromises(), toReadableString(), toReadableOnData(), toReadableOnDataNonObject(), destroysTheStreamWhenThrowing(), asTransformStream()]).then(mustCall());
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ yield 1
+ yield 2
+ yield 3
+ throw new Error()
+ } catch (e) {
+ validateError(e)
+ }
+ }
+
+ const stream = Readable.from(generate())
+ stream.read()
+ stream.once('error', common.mustCall())
+ stream.destroy('Boum')
+}
+
+Promise.all([
+ toReadableBasicSupport(),
+ toReadableSyncIterator(),
+ toReadablePromises(),
+ toReadableString(),
+ toReadableBuffer(),
+ toReadableOnData(),
+ toReadableOnDataNonObject(),
+ destroysTheStreamWhenThrowing(),
+ asTransformStream(),
+ endWithError(),
+ destroyingStreamWithErrorThrowsInGenerator()
+]).then(mustCall())
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js
index 9e47f0f487..53251d6ec8 100644
--- a/test/parallel/test-readable-large-hwm.js
+++ b/test/parallel/test-readable-large-hwm.js
@@ -1,50 +1,43 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Readable = _require.Readable; // Make sure that readable completes
+const { Readable } = require('../../lib/ours/index') // Make sure that readable completes
// even when reading larger buffer.
-
-var bufferSize = 10 * 1024 * 1024;
-var n = 0;
-var r = new Readable({
- read: function read() {
+const bufferSize = 10 * 1024 * 1024
+let n = 0
+const r = new Readable({
+ read() {
// Try to fill readable buffer piece by piece.
- r.push(bufferShim.alloc(bufferSize / 10));
+ r.push(Buffer.alloc(bufferSize / 10))
if (n++ > 10) {
- r.push(null);
+ r.push(null)
}
}
-});
-r.on('readable', function () {
+})
+r.on('readable', () => {
while (true) {
- var ret = r.read(bufferSize);
- if (ret === null) break;
+ const ret = r.read(bufferSize)
+ if (ret === null) break
}
-});
-r.on('end', common.mustCall());
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+r.on('end', common.mustCall())
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js
index b50c36fffa..df4cced655 100644
--- a/test/parallel/test-readable-single-end.js
+++ b/test/parallel/test-readable-single-end.js
@@ -1,37 +1,30 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Readable = _require.Readable; // This test ensures that there will not be an additional empty 'readable'
+const { Readable } = require('../../lib/ours/index') // This test ensures that there will not be an additional empty 'readable'
// event when stream has ended (only 1 event signalling about end)
-
-var r = new Readable({
- read: function read() {}
-});
-r.push(null);
-r.on('readable', common.mustCall());
-r.on('end', common.mustCall());
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+const r = new Readable({
+ read: () => {}
+})
+r.push(null)
+r.on('readable', common.mustCall())
+r.on('end', common.mustCall())
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-add-abort-signal.js b/test/parallel/test-stream-add-abort-signal.js
new file mode 100644
index 0000000000..976c72b206
--- /dev/null
+++ b/test/parallel/test-stream-add-abort-signal.js
@@ -0,0 +1,59 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+// Flags: --expose-internals
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const { addAbortSignal, Readable } = require('../../lib/ours/index')
+
+const { addAbortSignalNoValidate } = require('../../lib/internal/streams/add-abort-signal')
+
+{
+ assert.throws(() => {
+ addAbortSignal('INVALID_SIGNAL')
+ }, /ERR_INVALID_ARG_TYPE/)
+ const ac = new AbortController()
+ assert.throws(() => {
+ addAbortSignal(ac.signal, 'INVALID_STREAM')
+ }, /ERR_INVALID_ARG_TYPE/)
+}
+{
+ const r = new Readable({
+ read: () => {}
+ })
+ assert.deepStrictEqual(r, addAbortSignalNoValidate('INVALID_SIGNAL', r))
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-aliases-legacy.js b/test/parallel/test-stream-aliases-legacy.js
new file mode 100644
index 0000000000..759665ef00
--- /dev/null
+++ b/test/parallel/test-stream-aliases-legacy.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index') // Verify that all individual aliases are left in place.
+
+assert.strictEqual(stream.Readable, require('../../lib/_stream_readable'))
+assert.strictEqual(stream.Writable, require('../../lib/_stream_writable'))
+assert.strictEqual(stream.Duplex, require('../../lib/_stream_duplex'))
+assert.strictEqual(stream.Transform, require('../../lib/_stream_transform'))
+assert.strictEqual(stream.PassThrough, require('../../lib/_stream_passthrough'))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs
new file mode 100644
index 0000000000..a103920eef
--- /dev/null
+++ b/test/parallel/test-stream-asIndexedPairs.mjs
@@ -0,0 +1,64 @@
+import '../common/index.mjs';
+import { Readable }from '../../lib/ours/index.js';
+import { deepStrictEqual, rejects, throws } from 'assert';
+import tap from 'tap';
+
+{
+ // asIndexedPairs with a synchronous stream
+ const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray();
+ deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]);
+ const empty = await Readable.from([]).asIndexedPairs().toArray();
+ deepStrictEqual(empty, []);
+}
+
+{
+ // asIndexedPairs works an asynchronous streams
+ const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x);
+ const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray();
+ deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]);
+ const empty = await asyncFrom([]).asIndexedPairs().toArray();
+ deepStrictEqual(empty, []);
+}
+
+{
+ // Does not enumerate an infinite stream
+ const infinite = () => Readable.from(async function* () {
+ while (true) yield 1;
+ }());
+ const pairs = await infinite().asIndexedPairs().take(3).toArray();
+ deepStrictEqual(pairs, [[0, 1], [1, 1], [2, 1]]);
+ const empty = await infinite().asIndexedPairs().take(0).toArray();
+ deepStrictEqual(empty, []);
+}
+
+{
+ // AbortSignal
+ await rejects(async () => {
+ const ac = new AbortController();
+ const { signal } = ac;
+ const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray();
+ ac.abort();
+ await p;
+ }, { name: 'AbortError' });
+
+ await rejects(async () => {
+ const signal = AbortSignal.abort();
+ await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray();
+ }, /AbortError/);
+}
+
+{
+ // Error cases
+ throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/);
+ throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/);
+}
+
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ if(code === 0) {
+ tap.pass('test succeeded');
+ } else {
+ tap.fail(`test failed - exited code ${code}`);
+ }
+ });
+ /* replacement end */
diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js
index 93338b4c20..77454f4140 100644
--- a/test/parallel/test-stream-auto-destroy.js
+++ b/test/parallel/test-stream-auto-destroy.js
@@ -1,99 +1,139 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
{
- var r = new stream.Readable({
+ const r = new stream.Readable({
autoDestroy: true,
- read: function read() {
- this.push('hello');
- this.push('world');
- this.push(null);
+
+ read() {
+ this.push('hello')
+ this.push('world')
+ this.push(null)
},
- destroy: common.mustCall(function (err, cb) {
- return cb();
+
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ let ended = false
+ r.resume()
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ ended = true
})
- });
- var ended = false;
- r.resume();
- r.on('end', common.mustCall(function () {
- ended = true;
- }));
- r.on('close', common.mustCall(function () {
- assert(ended);
- }));
+ )
+ r.on(
+ 'close',
+ common.mustCall(() => {
+ assert(ended)
+ })
+ )
}
{
- var w = new stream.Writable({
+ const w = new stream.Writable({
autoDestroy: true,
- write: function write(data, enc, cb) {
- cb(null);
+
+ write(data, enc, cb) {
+ cb(null)
},
- destroy: common.mustCall(function (err, cb) {
- return cb();
+
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ let finished = false
+ w.write('hello')
+ w.write('world')
+ w.end()
+ w.on(
+ 'finish',
+ common.mustCall(() => {
+ finished = true
})
- });
- var finished = false;
- w.write('hello');
- w.write('world');
- w.end();
- w.on('finish', common.mustCall(function () {
- finished = true;
- }));
- w.on('close', common.mustCall(function () {
- assert(finished);
- }));
+ )
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ assert(finished)
+ })
+ )
}
{
- var t = new stream.Transform({
+ const t = new stream.Transform({
autoDestroy: true,
- transform: function transform(data, enc, cb) {
- cb(null, data);
+
+ transform(data, enc, cb) {
+ cb(null, data)
},
- destroy: common.mustCall(function (err, cb) {
- return cb();
+
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ let ended = false
+ let finished = false
+ t.write('hello')
+ t.write('world')
+ t.end()
+ t.resume()
+ t.on(
+ 'end',
+ common.mustCall(() => {
+ ended = true
+ })
+ )
+ t.on(
+ 'finish',
+ common.mustCall(() => {
+ finished = true
+ })
+ )
+ t.on(
+ 'close',
+ common.mustCall(() => {
+ assert(ended)
+ assert(finished)
})
- });
- var _ended = false;
- var _finished = false;
- t.write('hello');
- t.write('world');
- t.end();
- t.resume();
- t.on('end', common.mustCall(function () {
- _ended = true;
- }));
- t.on('finish', common.mustCall(function () {
- _finished = true;
- }));
- t.on('close', common.mustCall(function () {
- assert(_ended);
- assert(_finished);
- }));
+ )
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ const r = new stream.Readable({
+ read() {
+ r2.emit('error', new Error('fail'))
+ }
+ })
+ const r2 = new stream.Readable({
+ autoDestroy: true,
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ r.pipe(r2)
+}
+{
+ const r = new stream.Readable({
+ read() {
+ w.emit('error', new Error('fail'))
+ }
+ })
+ const w = new stream.Writable({
+ autoDestroy: true,
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ r.pipe(w)
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js
new file mode 100644
index 0000000000..7a6f7d683f
--- /dev/null
+++ b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { PassThrough } = require('../../lib/ours/index')
+
+const encode = new PassThrough({
+ highWaterMark: 1
+})
+const decode = new PassThrough({
+ highWaterMark: 1
+})
+const send = common.mustCall((buf) => {
+ encode.write(buf)
+}, 4)
+let i = 0
+const onData = common.mustCall(() => {
+ if (++i === 2) {
+ send(Buffer.from([0x3]))
+ send(Buffer.from([0x4]))
+ }
+}, 4)
+encode.pipe(decode).on('data', onData)
+send(Buffer.from([0x1]))
+send(Buffer.from([0x2]))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-backpressure.js b/test/parallel/test-stream-backpressure.js
index 35da341a75..d9072aec52 100644
--- a/test/parallel/test-stream-backpressure.js
+++ b/test/parallel/test-stream-backpressure.js
@@ -1,59 +1,53 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var pushes = 0;
-var total = 65500 + 40 * 1024;
-var rs = new stream.Readable({
+let pushes = 0
+const total = 65500 + 40 * 1024
+const rs = new stream.Readable({
read: common.mustCall(function () {
if (pushes++ === 10) {
- this.push(null);
- return;
+ this.push(null)
+ return
}
- var length = this._readableState.length; // We are at most doing two full runs of _reads
+ const length = this._readableState.length // We are at most doing two full runs of _reads
// before stopping, because Readable is greedy
// to keep its buffer full
- assert(length <= total);
- this.push(bufferShim.alloc(65500));
+ assert(length <= total)
+ this.push(Buffer.alloc(65500))
- for (var i = 0; i < 40; i++) {
- this.push(bufferShim.alloc(1024));
+ for (let i = 0; i < 40; i++) {
+ this.push(Buffer.alloc(1024))
} // We will be over highWaterMark at this point
// but a new call to _read is scheduled anyway.
-
}, 11)
-});
-var ws = stream.Writable({
+})
+const ws = stream.Writable({
write: common.mustCall(function (data, enc, cb) {
- setImmediate(cb);
+ setImmediate(cb)
}, 41 * 10)
-});
-rs.pipe(ws);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+rs.pipe(ws)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-base-prototype-accessors-enumerability.js b/test/parallel/test-stream-base-prototype-accessors-enumerability.js
new file mode 100644
index 0000000000..c58a0f8176
--- /dev/null
+++ b/test/parallel/test-stream-base-prototype-accessors-enumerability.js
@@ -0,0 +1,34 @@
+// Flags: --expose-internals
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common') // This tests that the prototype accessors added by StreamBase::AddMethods
+// are not enumerable. They could be enumerated when inspecting the prototype
+// with util.inspect or the inspector protocol.
+
+const assert = require('assert') // Or anything that calls StreamBase::AddMethods when setting up its prototype
+
+const internalBinding = process.binding
+const TTY = internalBinding('tty_wrap').TTY
+{
+ const ttyIsEnumerable = Object.prototype.propertyIsEnumerable.bind(TTY)
+ assert.strictEqual(ttyIsEnumerable('bytesRead'), false)
+ assert.strictEqual(ttyIsEnumerable('fd'), false)
+ assert.strictEqual(ttyIsEnumerable('_externalStream'), false)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-base-typechecking.js b/test/parallel/test-stream-base-typechecking.js
new file mode 100644
index 0000000000..fdf895dfbe
--- /dev/null
+++ b/test/parallel/test-stream-base-typechecking.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const net = require('net')
+
+const server = net.createServer().listen(
+ 0,
+ common.mustCall(() => {
+ const client = net.connect(
+ server.address().port,
+ common.mustCall(() => {
+ assert.throws(
+ () => {
+ client.write('broken', 'buffer')
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: 'Second argument must be a buffer'
+ }
+ )
+ client.destroy()
+ server.close()
+ })
+ )
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js
index 063dee3e1e..f6591a7b59 100644
--- a/test/parallel/test-stream-big-packet.js
+++ b/test/parallel/test-stream-big-packet.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,90 +18,66 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-require('../common');
+const tap = require('tap')
-var assert = require('assert/');
+const silentConsole = {
+ log() {},
-var stream = require('../../');
-
-var passed = false;
-
-var TestStream =
-/*#__PURE__*/
-function (_stream$Transform) {
- _inherits(TestStream, _stream$Transform);
+ error() {}
+}
+require('../common')
- function TestStream() {
- _classCallCheck(this, TestStream);
+const assert = require('assert')
- return _possibleConstructorReturn(this, _getPrototypeOf(TestStream).apply(this, arguments));
- }
+const stream = require('../../lib/ours/index')
- _createClass(TestStream, [{
- key: "_transform",
- value: function _transform(chunk, encoding, done) {
- if (!passed) {
- // Char 'a' only exists in the last write
- passed = chunk.toString().includes('a');
- }
+let passed = false
- done();
+class TestStream extends stream.Transform {
+ _transform(chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = chunk.toString().includes('a')
}
- }]);
- return TestStream;
-}(stream.Transform);
+ done()
+ }
+}
-var s1 = new stream.PassThrough();
-var s2 = new stream.PassThrough();
-var s3 = new TestStream();
-s1.pipe(s3); // Don't let s2 auto close which may close s3
+const s1 = new stream.Transform({
+ transform(chunk, encoding, cb) {
+ process.nextTick(cb, null, chunk)
+ }
+})
+const s2 = new stream.PassThrough()
+const s3 = new TestStream()
+s1.pipe(s3) // Don't let s2 auto close which may close s3
s2.pipe(s3, {
end: false
-}); // We must write a buffer larger than highWaterMark
+}) // We must write a buffer larger than highWaterMark
-var big = bufferShim.alloc(s1.writableHighWaterMark + 1, 'x'); // Since big is larger than highWaterMark, it will be buffered internally.
+const big = Buffer.alloc(s1.writableHighWaterMark + 1, 'x') // Since big is larger than highWaterMark, it will be buffered internally.
-assert(!s1.write(big)); // 'tiny' is small enough to pass through internal buffer.
+assert(!s1.write(big)) // 'tiny' is small enough to pass through internal buffer.
-assert(s2.write('tiny')); // Write some small data in next IO loop, which will never be written to s3
+assert(s2.write('tiny')) // Write some small data in next IO loop, which will never be written to s3
// Because 'drain' event is not emitted from s1 and s1 is still paused
-setImmediate(s1.write.bind(s1), 'later'); // Assert after two IO loops when all operations have been done.
+setImmediate(s1.write.bind(s1), 'later') // Assert after two IO loops when all operations have been done.
process.on('exit', function () {
- assert(passed, 'Large buffer is not handled properly by Writable Stream');
-});
-
-function indexOf(xs, x) {
- for (var i = 0, l = xs.length; i < l; i++) {
- if (xs[i] === x) return i;
+ assert(passed, 'Large buffer is not handled properly by Writable Stream')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
}
-
- return -1;
-}
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js
index 54ef1ca0ad..b48eb437be 100644
--- a/test/parallel/test-stream-big-push.js
+++ b/test/parallel/test-stream-big-push.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,76 +18,71 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var str = 'asdfasdfasdfasdfasdf';
-var r = new stream.Readable({
+const str = 'asdfasdfasdfasdfasdf'
+const r = new stream.Readable({
highWaterMark: 5,
encoding: 'utf8'
-});
-var reads = 0;
+})
+let reads = 0
function _read() {
if (reads === 0) {
- setTimeout(function () {
- r.push(str);
- }, 1);
- reads++;
+ setTimeout(() => {
+ r.push(str)
+ }, 1)
+ reads++
} else if (reads === 1) {
- var _ret = r.push(str);
-
- assert.strictEqual(_ret, false);
- reads++;
+ const ret = r.push(str)
+ assert.strictEqual(ret, false)
+ reads++
} else {
- r.push(null);
+ r.push(null)
}
}
-r._read = common.mustCall(_read, 3);
-r.on('end', common.mustCall()); // push some data in to start.
-// we've never gotten any read event at this point.
+r._read = common.mustCall(_read, 3)
+r.on('end', common.mustCall()) // Push some data in to start.
+// We've never gotten any read event at this point.
-var ret = r.push(str); // should be false. > hwm
+const ret = r.push(str) // Should be false. > hwm
-assert(!ret);
-var chunk = r.read();
-assert.strictEqual(chunk, str);
-chunk = r.read();
-assert.strictEqual(chunk, null);
-r.once('readable', function () {
- // this time, we'll get *all* the remaining data, because
+assert(!ret)
+let chunk = r.read()
+assert.strictEqual(chunk, str)
+chunk = r.read()
+assert.strictEqual(chunk, null)
+r.once('readable', () => {
+ // This time, we'll get *all* the remaining data, because
// it's been added synchronously, as the read WOULD take
// us below the hwm, and so it triggered a _read() again,
// which synchronously added more, which we then return.
- chunk = r.read();
- assert.strictEqual(chunk, str + str);
- chunk = r.read();
- assert.strictEqual(chunk, null);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ chunk = r.read()
+ assert.strictEqual(chunk, str + str)
+ chunk = r.read()
+ assert.strictEqual(chunk, null)
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js
index 85d1aea460..adcd14310d 100644
--- a/test/parallel/test-stream-buffer-list.js
+++ b/test/parallel/test-stream-buffer-list.js
@@ -1,47 +1,92 @@
-"use strict";
+// Flags: --expose-internals
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var BufferList = require('../../lib/internal/streams/buffer_list'); // Test empty buffer list.
+const BufferList = require('../../lib/internal/streams/buffer_list') // Test empty buffer list.
+const emptyList = new BufferList()
+emptyList.shift()
+assert.deepStrictEqual(emptyList, new BufferList())
+assert.strictEqual(emptyList.join(','), '')
+assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0))
+const buf = Buffer.from('foo')
-var emptyList = new BufferList();
-emptyList.shift();
-assert.deepStrictEqual(emptyList, new BufferList());
-assert.strictEqual(emptyList.join(','), '');
-assert.deepStrictEqual(emptyList.concat(0), bufferShim.alloc(0));
-var buf = bufferShim.from('foo'); // Test buffer list with one element.
+function testIterator(list, count) {
+ // test iterator
+ let len = 0 // eslint-disable-next-line no-unused-vars
-var list = new BufferList();
-list.push(buf);
-var copy = list.concat(3);
-assert.notStrictEqual(copy, buf);
-assert.deepStrictEqual(copy, buf);
-assert.strictEqual(list.join(','), 'foo');
-var shifted = list.shift();
-assert.strictEqual(shifted, buf);
-assert.deepStrictEqual(list, new BufferList());
-;
+ for (const x of list) {
+ len++
+ }
-(function () {
- var t = require('tap');
+ assert.strictEqual(len, count)
+} // Test buffer list with one element.
- t.pass('sync run');
-})();
+const list = new BufferList()
+testIterator(list, 0)
+list.push(buf)
+testIterator(list, 1)
-var _list = process.listeners('uncaughtException');
+for (const x of list) {
+ assert.strictEqual(x, buf)
+}
-process.removeAllListeners('uncaughtException');
+const copy = list.concat(3)
+testIterator(copy, 3)
+assert.notStrictEqual(copy, buf)
+assert.deepStrictEqual(copy, buf)
+assert.strictEqual(list.join(','), 'foo')
+const shifted = list.shift()
+testIterator(list, 0)
+assert.strictEqual(shifted, buf)
+assert.deepStrictEqual(list, new BufferList())
+{
+ const list = new BufferList()
+ list.push('foo')
+ list.push('bar')
+ list.push('foo')
+ list.push('bar')
+ assert.strictEqual(list.consume(6, true), 'foobar')
+ assert.strictEqual(list.consume(6, true), 'foobar')
+}
+{
+ const list = new BufferList()
+ list.push('foo')
+ list.push('bar')
+ assert.strictEqual(list.consume(5, true), 'fooba')
+}
+{
+ const list = new BufferList()
+ list.push(buf)
+ list.push(buf)
+ list.push(buf)
+ list.push(buf)
+ assert.strictEqual(list.consume(6).toString(), 'foofoo')
+ assert.strictEqual(list.consume(6).toString(), 'foofoo')
+}
+{
+ const list = new BufferList()
+ list.push(buf)
+ list.push(buf)
+ assert.strictEqual(list.consume(5).toString(), 'foofo')
+}
+/* replacement start */
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-catch-rejections.js b/test/parallel/test-stream-catch-rejections.js
new file mode 100644
index 0000000000..9a39c0870c
--- /dev/null
+++ b/test/parallel/test-stream-catch-rejections.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+{
+ const r = new stream.Readable({
+ captureRejections: true,
+
+ read() {}
+ })
+ r.push('hello')
+ r.push('world')
+ const err = new Error('kaboom')
+ r.on(
+ 'error',
+ common.mustCall((_err) => {
+ assert.strictEqual(err, _err)
+ assert.strictEqual(r.destroyed, true)
+ })
+ )
+ r.on('data', async () => {
+ throw err
+ })
+}
+{
+ const w = new stream.Writable({
+ captureRejections: true,
+ highWaterMark: 1,
+
+ write(chunk, enc, cb) {
+ process.nextTick(cb)
+ }
+ })
+ const err = new Error('kaboom')
+ w.write('hello', () => {
+ w.write('world')
+ })
+ w.on(
+ 'error',
+ common.mustCall((_err) => {
+ assert.strictEqual(err, _err)
+ assert.strictEqual(w.destroyed, true)
+ })
+ )
+ w.on(
+ 'drain',
+ common.mustCall(async () => {
+ throw err
+ }, 2)
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-compose.js b/test/parallel/test-stream-compose.js
new file mode 100644
index 0000000000..2cf56da13f
--- /dev/null
+++ b/test/parallel/test-stream-compose.js
@@ -0,0 +1,501 @@
+// Flags: --expose-internals
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable, Transform, Writable, finished, PassThrough } = require('../../lib/ours/index')
+
+const compose = require('../../lib/internal/streams/compose')
+
+const assert = require('assert')
+
+{
+ let res = ''
+ compose(
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk + chunk)
+ })
+ }),
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ })
+ )
+ .end('asd')
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASDASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk + chunk
+ }
+ },
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk.toString().toUpperCase()
+ }
+ }
+ )
+ .end('asd')
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASDASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(async function* (source) {
+ for await (const chunk of source) {
+ yield chunk + chunk
+ }
+ })
+ .end('asd')
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'asdasd')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ Readable.from(['asd']),
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ })
+ )
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ (async function* () {
+ yield 'asd'
+ })(),
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ })
+ )
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ new Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ res += chunk
+ callback(null)
+ })
+ })
+ )
+ .end('asd')
+ .on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ res += chunk
+ }
+ }
+ )
+ .end('asd')
+ .on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res
+ compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ })
+ )
+ .end(true)
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res = buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res.chunk.chunk, true)
+ })
+ )
+}
+{
+ const _err = new Error('asd')
+
+ compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(_err)
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ new Transform({
+ objectMode: true,
+ transform: common.mustNotCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ })
+ )
+ .end(true)
+ .on('data', common.mustNotCall())
+ .on('end', common.mustNotCall())
+ .on('error', (err) => {
+ assert.strictEqual(err, _err)
+ })
+}
+{
+ const _err = new Error('asd')
+
+ compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk)
+ })
+ }),
+ async function* (source) {
+ // eslint-disable-line require-yield
+ let tmp = ''
+
+ for await (const chunk of source) {
+ tmp += chunk
+ throw _err
+ }
+
+ return tmp
+ },
+ new Transform({
+ objectMode: true,
+ transform: common.mustNotCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ })
+ )
+ .end(true)
+ .on('data', common.mustNotCall())
+ .on('end', common.mustNotCall())
+ .on('error', (err) => {
+ assert.strictEqual(err, _err)
+ })
+}
+{
+ let buf = '' // Convert into readable Duplex.
+
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })(),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ }
+ )
+ assert.strictEqual(s1.writable, false)
+ assert.strictEqual(s1.readable, false)
+ finished(
+ s1.resume(),
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ let buf = '' // Convert into transform duplex.
+
+ const s2 = compose(async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ })
+ s2.end('helloworld')
+ s2.resume()
+ s2.on('data', (chunk) => {
+ buf += chunk
+ })
+ finished(
+ s2.resume(),
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ let buf = '' // Convert into readable Duplex.
+
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })()
+ ) // Convert into transform duplex.
+
+ const s2 = compose(async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ }) // Convert into writable duplex.
+
+ const s3 = compose(async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ })
+ const s4 = compose(s1, s2, s3)
+ finished(
+ s4,
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ let buf = '' // Convert into readable Duplex.
+
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })(),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ }
+ )
+ finished(
+ s1,
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ try {
+ compose()
+ } catch (err) {
+ assert.strictEqual(err.code, 'ERR_MISSING_ARGS')
+ }
+}
+{
+ try {
+ compose(new Writable(), new PassThrough())
+ } catch (err) {
+ assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE')
+ }
+}
+{
+ try {
+ compose(
+ new PassThrough(),
+ new Readable({
+ read() {}
+ }),
+ new PassThrough()
+ )
+ } catch (err) {
+ assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE')
+ }
+}
+{
+ let buf = '' // Convert into readable Duplex.
+
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })(),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+
+ return buf
+ }
+ )
+ finished(
+ s1,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_INVALID_RETURN_VALUE')
+ })
+ )
+}
+{
+ let buf = '' // Convert into readable Duplex.
+
+ const s1 = compose(
+ 'HelloWorld',
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ }
+ )
+ finished(
+ s1,
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-construct.js b/test/parallel/test-stream-construct.js
new file mode 100644
index 0000000000..03aa33f862
--- /dev/null
+++ b/test/parallel/test-stream-construct.js
@@ -0,0 +1,340 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable, Readable, Duplex } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+{
+ // Multiple callback.
+ new Writable({
+ construct: common.mustCall((callback) => {
+ callback()
+ callback()
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+ )
+}
+{
+ // Multiple callback.
+ new Readable({
+ construct: common.mustCall((callback) => {
+ callback()
+ callback()
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+ )
+}
+{
+ // Synchronous error.
+ new Writable({
+ construct: common.mustCall((callback) => {
+ callback(new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+{
+ // Synchronous error.
+ new Readable({
+ construct: common.mustCall((callback) => {
+ callback(new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+{
+ // Asynchronous error.
+ new Writable({
+ construct: common.mustCall((callback) => {
+ process.nextTick(callback, new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+{
+ // Asynchronous error.
+ new Readable({
+ construct: common.mustCall((callback) => {
+ process.nextTick(callback, new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+
+function testDestroy(factory) {
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy()
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy(null, () => {
+ assert.strictEqual(constructed, true)
+ })
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy()
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ })
+ )
+ s.destroy(new Error('kaboom'), (err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ assert.strictEqual(constructed, true)
+ })
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy(new Error())
+ }
+}
+
+testDestroy(
+ (opts) =>
+ new Readable({
+ read: common.mustNotCall(),
+ ...opts
+ })
+)
+testDestroy(
+ (opts) =>
+ new Writable({
+ write: common.mustNotCall(),
+ final: common.mustNotCall(),
+ ...opts
+ })
+)
+{
+ let constructed = false
+ const r = new Readable({
+ autoDestroy: true,
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ }),
+ read: common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ r.push(null)
+ })
+ })
+ r.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ r.on('data', common.mustNotCall())
+}
+{
+ let constructed = false
+ const w = new Writable({
+ autoDestroy: true,
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ }),
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(constructed, true)
+ process.nextTick(cb)
+ }),
+ final: common.mustCall((cb) => {
+ assert.strictEqual(constructed, true)
+ process.nextTick(cb)
+ })
+ })
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ w.end('data')
+}
+{
+ let constructed = false
+ const w = new Writable({
+ autoDestroy: true,
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ }),
+ write: common.mustNotCall(),
+ final: common.mustCall((cb) => {
+ assert.strictEqual(constructed, true)
+ process.nextTick(cb)
+ })
+ })
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ w.end()
+}
+{
+ new Duplex({
+ construct: common.mustCall()
+ })
+}
+{
+ // https://github.com/nodejs/node/issues/34448
+ let constructed = false
+ const d = new Duplex({
+ readable: false,
+ construct: common.mustCall((callback) => {
+ setImmediate(
+ common.mustCall(() => {
+ constructed = true
+ callback()
+ })
+ )
+ }),
+
+ write(chunk, encoding, callback) {
+ callback()
+ },
+
+ read() {
+ this.push(null)
+ }
+ })
+ d.resume()
+ d.end('foo')
+ d.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+}
+{
+ // Construct should not cause stream to read.
+ new Readable({
+ construct: common.mustCall((callback) => {
+ callback()
+ }),
+ read: common.mustNotCall()
+ })
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-decoder-objectmode.js b/test/parallel/test-stream-decoder-objectmode.js
index fe5a356734..32a5839a88 100644
--- a/test/parallel/test-stream-decoder-objectmode.js
+++ b/test/parallel/test-stream-decoder-objectmode.js
@@ -1,42 +1,37 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
-var readable = new stream.Readable({
- read: function read() {},
+const readable = new stream.Readable({
+ read: () => {},
encoding: 'utf16le',
objectMode: true
-});
-readable.push(bufferShim.from('abc', 'utf16le'));
-readable.push(bufferShim.from('def', 'utf16le'));
-readable.push(null); // Without object mode, these would be concatenated into a single chunk.
-
-assert.strictEqual(readable.read(), 'abc');
-assert.strictEqual(readable.read(), 'def');
-assert.strictEqual(readable.read(), null);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+readable.push(Buffer.from('abc', 'utf16le'))
+readable.push(Buffer.from('def', 'utf16le'))
+readable.push(null) // Without object mode, these would be concatenated into a single chunk.
+
+assert.strictEqual(readable.read(), 'abc')
+assert.strictEqual(readable.read(), 'def')
+assert.strictEqual(readable.read(), null)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-destroy-event-order.js b/test/parallel/test-stream-destroy-event-order.js
index 12db9e626a..8086a1733f 100644
--- a/test/parallel/test-stream-destroy-event-order.js
+++ b/test/parallel/test-stream-destroy-event-order.js
@@ -1,45 +1,45 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var _require = require('../../'),
- Readable = _require.Readable;
-
-var rs = new Readable({
- read: function read() {}
-});
-var closed = false;
-var errored = false;
-rs.on('close', common.mustCall(function () {
- closed = true;
- assert(errored);
-}));
-rs.on('error', common.mustCall(function (err) {
- errored = true;
- assert(!closed);
-}));
-rs.destroy(new Error('kaboom'));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable } = require('../../lib/ours/index')
+
+const rs = new Readable({
+ read() {}
+})
+let closed = false
+let errored = false
+rs.on(
+ 'close',
+ common.mustCall(() => {
+ closed = true
+ assert(errored)
+ })
+)
+rs.on(
+ 'error',
+ common.mustCall((err) => {
+ errored = true
+ assert(!closed)
+ })
+)
+rs.destroy(new Error('kaboom'))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js
new file mode 100644
index 0000000000..a80d54b8e4
--- /dev/null
+++ b/test/parallel/test-stream-drop-take.js
@@ -0,0 +1,160 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const { deepStrictEqual, rejects, throws } = require('assert')
+
+const { from } = Readable
+
+const fromAsync = (...args) => from(...args).map(async (x) => x)
+
+const naturals = () =>
+ from(
+ (async function* () {
+ let i = 1
+
+ while (true) {
+ yield i++
+ }
+ })()
+ )
+
+{
+ // Synchronous streams
+ ;(async () => {
+ deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3])
+ deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1])
+ deepStrictEqual(await from([]).drop(2).toArray(), [])
+ deepStrictEqual(await from([]).take(1).toArray(), [])
+ deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2])
+ deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2])
+ deepStrictEqual(await from([1, 2]).take(0).toArray(), [])
+ })().then(common.mustCall()) // Asynchronous streams
+
+ ;(async () => {
+ deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3])
+ deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1])
+ deepStrictEqual(await fromAsync([]).drop(2).toArray(), [])
+ deepStrictEqual(await fromAsync([]).take(1).toArray(), [])
+ deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2])
+ deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2])
+ deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), [])
+ })().then(common.mustCall()) // Infinite streams
+ // Asynchronous streams
+
+ ;(async () => {
+ deepStrictEqual(await naturals().take(1).toArray(), [1])
+ deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2])
+ const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
+ deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10)
+ deepStrictEqual(await naturals().take(5).take(1).toArray(), [1])
+ })().then(common.mustCall())
+}
+{
+ // Coercion
+ ;(async () => {
+ // The spec made me do this ^^
+ deepStrictEqual(await naturals().take('cat').toArray(), [])
+ deepStrictEqual(await naturals().take('2').toArray(), [1, 2])
+ deepStrictEqual(await naturals().take(true).toArray(), [1])
+ })().then(common.mustCall())
+}
+{
+ // Support for AbortSignal
+ const ac = new AbortController()
+ rejects(
+ Readable.from([1, 2, 3])
+ .take(1, {
+ signal: ac.signal
+ })
+ .toArray(),
+ {
+ name: 'AbortError'
+ }
+ ).then(common.mustCall())
+ rejects(
+ Readable.from([1, 2, 3])
+ .drop(1, {
+ signal: ac.signal
+ })
+ .toArray(),
+ {
+ name: 'AbortError'
+ }
+ ).then(common.mustCall())
+ ac.abort()
+}
+{
+ // Support for AbortSignal, already aborted
+ const signal = AbortSignal.abort()
+ rejects(
+ Readable.from([1, 2, 3])
+ .take(1, {
+ signal
+ })
+ .toArray(),
+ {
+ name: 'AbortError'
+ }
+ ).then(common.mustCall())
+}
+{
+ // Error cases
+ const invalidArgs = [-1, -Infinity, -40]
+
+ for (const example of invalidArgs) {
+ throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/)
+ }
+
+ throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/)
+ throws(
+ () =>
+ Readable.from([1]).drop(1, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+ throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/)
+ throws(
+ () =>
+ Readable.from([1]).take(1, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js
index d163430858..52a4f3a03a 100644
--- a/test/parallel/test-stream-duplex-destroy.js
+++ b/test/parallel/test-stream-duplex-destroy.js
@@ -1,254 +1,309 @@
-"use strict";
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
-var common = require('../common');
-
-var _require = require('../../'),
- Duplex = _require.Duplex;
-
-var assert = require('assert/');
-
-{
- var duplex = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
- },
- read: function read() {}
- });
- duplex.resume();
- duplex.on('end', common.mustNotCall());
- duplex.on('finish', common.mustNotCall());
- duplex.on('close', common.mustCall());
- duplex.destroy();
- assert.strictEqual(duplex.destroyed, true);
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
}
-{
- var _duplex = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
- },
- read: function read() {}
- });
+/* replacement end */
- _duplex.resume();
+;('use strict')
- var expected = new Error('kaboom');
+const tap = require('tap')
- _duplex.on('end', common.mustNotCall());
+const silentConsole = {
+ log() {},
- _duplex.on('finish', common.mustNotCall());
+ error() {}
+}
+const common = require('../common')
- _duplex.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, expected);
- }));
+const { Duplex } = require('../../lib/ours/index')
- _duplex.destroy(expected);
+const assert = require('assert')
- assert.strictEqual(_duplex.destroyed, true);
-}
{
- var _duplex2 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {}
- });
- _duplex2._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected);
- cb(err);
- });
-
- var _expected = new Error('kaboom');
-
- _duplex2.on('finish', common.mustNotCall('no finish event'));
-
- _duplex2.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected);
- }));
-
- _duplex2.destroy(_expected);
-
- assert.strictEqual(_duplex2.destroyed, true);
+ read() {}
+ })
+ duplex.resume()
+ duplex.on('end', common.mustNotCall())
+ duplex.on('finish', common.mustNotCall())
+ duplex.on('close', common.mustCall())
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
}
{
- var _expected2 = new Error('kaboom');
-
- var _duplex3 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {},
- destroy: common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected2);
- cb();
- })
- });
-
- _duplex3.resume();
-
- _duplex3.on('end', common.mustNotCall('no end event'));
- _duplex3.on('finish', common.mustNotCall('no finish event')); // error is swallowed by the custom _destroy
-
-
- _duplex3.on('error', common.mustNotCall('no error event'));
-
- _duplex3.on('close', common.mustCall());
-
- _duplex3.destroy(_expected2);
-
- assert.strictEqual(_duplex3.destroyed, true);
+ read() {}
+ })
+ duplex.resume()
+ const expected = new Error('kaboom')
+ duplex.on('end', common.mustNotCall())
+ duplex.on('finish', common.mustNotCall())
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ duplex.destroy(expected)
+ assert.strictEqual(duplex.destroyed, true)
}
{
- var _duplex4 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {}
- });
-
- _duplex4._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb();
- });
-
- _duplex4.destroy();
- assert.strictEqual(_duplex4.destroyed, true);
+ read() {}
+ })
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ })
+ const expected = new Error('kaboom')
+ duplex.on('finish', common.mustNotCall('no finish event'))
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ duplex.destroy(expected)
+ assert.strictEqual(duplex.destroyed, true)
}
{
- var _duplex5 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const expected = new Error('kaboom')
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {}
- });
-
- _duplex5.resume();
-
- _duplex5._destroy = common.mustCall(function (err, cb) {
- var _this = this;
-
- assert.strictEqual(err, null);
- process.nextTick(function () {
- _this.push(null);
-
- _this.end();
-
- cb();
- });
- });
- var fail = common.mustNotCall('no finish or end event');
-
- _duplex5.on('finish', fail);
-
- _duplex5.on('end', fail);
-
- _duplex5.destroy();
-
- _duplex5.removeListener('end', fail);
-
- _duplex5.removeListener('finish', fail);
-
- _duplex5.on('end', common.mustCall());
- _duplex5.on('finish', common.mustCall());
+ read() {},
- assert.strictEqual(_duplex5.destroyed, true);
+ destroy: common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb()
+ })
+ })
+ duplex.resume()
+ duplex.on('end', common.mustNotCall('no end event'))
+ duplex.on('finish', common.mustNotCall('no finish event')) // Error is swallowed by the custom _destroy
+
+ duplex.on('error', common.mustNotCall('no error event'))
+ duplex.on('close', common.mustCall())
+ duplex.destroy(expected)
+ assert.strictEqual(duplex.destroyed, true)
}
{
- var _duplex6 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {}
- });
-
- var _expected3 = new Error('kaboom');
-
- _duplex6._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb(_expected3);
- });
-
- _duplex6.on('finish', common.mustNotCall('no finish event'));
- _duplex6.on('end', common.mustNotCall('no end event'));
-
- _duplex6.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected3);
- }));
-
- _duplex6.destroy();
-
- assert.strictEqual(_duplex6.destroyed, true);
+ read() {}
+ })
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ })
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
}
{
- var _duplex7 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {},
- allowHalfOpen: true
- });
-
- _duplex7.resume();
- _duplex7.on('finish', common.mustNotCall());
-
- _duplex7.on('end', common.mustNotCall());
-
- _duplex7.destroy();
+ read() {}
+ })
+ duplex.resume()
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.push(null)
+ this.end()
+ cb()
+ })
+ })
+ const fail = common.mustNotCall('no finish or end event')
+ duplex.on('finish', fail)
+ duplex.on('end', fail)
+ duplex.destroy()
+ duplex.removeListener('end', fail)
+ duplex.removeListener('finish', fail)
+ duplex.on('end', common.mustNotCall())
+ duplex.on('finish', common.mustNotCall())
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
- assert.strictEqual(_duplex7.destroyed, true);
+ read() {}
+ })
+ const expected = new Error('kaboom')
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ })
+ duplex.on('finish', common.mustNotCall('no finish event'))
+ duplex.on('end', common.mustNotCall('no end event'))
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
}
{
- var _duplex8 = new Duplex({
- write: function write(chunk, enc, cb) {
- cb();
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
},
- read: function read() {}
- });
- _duplex8.destroyed = true;
- assert.strictEqual(_duplex8.destroyed, true); // the internal destroy() mechanism should not be triggered
+ read() {},
- _duplex8.on('finish', common.mustNotCall());
+ allowHalfOpen: true
+ })
+ duplex.resume()
+ duplex.on('finish', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
- _duplex8.on('end', common.mustNotCall());
+ read() {}
+ })
+ duplex.destroyed = true
+ assert.strictEqual(duplex.destroyed, true) // The internal destroy() mechanism should not be triggered
- _duplex8.destroy();
+ duplex.on('finish', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+ duplex.destroy()
}
{
function MyDuplex() {
- assert.strictEqual(this.destroyed, false);
- this.destroyed = false;
- Duplex.call(this);
+ assert.strictEqual(this.destroyed, false)
+ this.destroyed = false
+ Duplex.call(this)
}
- Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype);
- Object.setPrototypeOf(MyDuplex, Duplex);
- new MyDuplex();
+ Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype)
+ Object.setPrototypeOf(MyDuplex, Duplex)
+ new MyDuplex()
+}
+{
+ const duplex = new Duplex({
+ writable: false,
+ autoDestroy: true,
+
+ write(chunk, enc, cb) {
+ cb()
+ },
+
+ read() {}
+ })
+ duplex.push(null)
+ duplex.resume()
+ duplex.on('close', common.mustCall())
+}
+{
+ const duplex = new Duplex({
+ readable: false,
+ autoDestroy: true,
+
+ write(chunk, enc, cb) {
+ cb()
+ },
+
+ read() {}
+ })
+ duplex.end()
+ duplex.on('close', common.mustCall())
}
-;
+{
+ const duplex = new Duplex({
+ allowHalfOpen: false,
+ autoDestroy: true,
-(function () {
- var t = require('tap');
+ write(chunk, enc, cb) {
+ cb()
+ },
- t.pass('sync run');
-})();
+ read() {}
+ })
+ duplex.push(null)
+ duplex.resume()
+ const orgEnd = duplex.end
+ duplex.end = common.mustNotCall()
+ duplex.on('end', () => {
+ // Ensure end() is called in next tick to allow
+ // any pending writes to be invoked first.
+ process.nextTick(() => {
+ duplex.end = common.mustCall(orgEnd)
+ })
+ })
+ duplex.on('close', common.mustCall())
+}
+{
+ // Check abort signal
+ const controller = new AbortController()
+ const { signal } = controller
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
-var _list = process.listeners('uncaughtException');
+ read() {},
-process.removeAllListeners('uncaughtException');
+ signal
+ })
+ let count = 0
+ duplex.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(count++, 0) // Ensure not called twice
-_list.pop();
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ duplex.on('close', common.mustCall())
+ controller.abort()
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-end.js b/test/parallel/test-stream-duplex-end.js
index 194648950a..743caed878 100644
--- a/test/parallel/test-stream-duplex-end.js
+++ b/test/parallel/test-stream-duplex-end.js
@@ -1,73 +1,60 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Duplex = require('../../').Duplex;
+const Duplex = require('../../lib/ours/index').Duplex
{
- var stream = new Duplex({
- read: function read() {}
- });
- assert.strictEqual(stream.allowHalfOpen, true);
- stream.on('finish', common.mustNotCall());
- assert.strictEqual(stream.listenerCount('end'), 0);
- stream.resume();
- stream.push(null);
+ const stream = new Duplex({
+ read() {}
+ })
+ assert.strictEqual(stream.allowHalfOpen, true)
+ stream.on('finish', common.mustNotCall())
+ assert.strictEqual(stream.listenerCount('end'), 0)
+ stream.resume()
+ stream.push(null)
}
{
- var _stream = new Duplex({
- read: function read() {},
- allowHalfOpen: false
- });
-
- assert.strictEqual(_stream.allowHalfOpen, false);
-
- _stream.on('finish', common.mustCall());
-
- assert.strictEqual(_stream.listenerCount('end'), 1);
+ const stream = new Duplex({
+ read() {},
- _stream.resume();
-
- _stream.push(null);
+ allowHalfOpen: false
+ })
+ assert.strictEqual(stream.allowHalfOpen, false)
+ stream.on('finish', common.mustCall())
+ assert.strictEqual(stream.listenerCount('end'), 0)
+ stream.resume()
+ stream.push(null)
}
{
- var _stream2 = new Duplex({
- read: function read() {},
- allowHalfOpen: false
- });
-
- assert.strictEqual(_stream2.allowHalfOpen, false);
- _stream2._writableState.ended = true;
-
- _stream2.on('finish', common.mustNotCall());
+ const stream = new Duplex({
+ read() {},
- assert.strictEqual(_stream2.listenerCount('end'), 1);
-
- _stream2.resume();
-
- _stream2.push(null);
+ allowHalfOpen: false
+ })
+ assert.strictEqual(stream.allowHalfOpen, false)
+ stream._writableState.ended = true
+ stream.on('finish', common.mustNotCall())
+ assert.strictEqual(stream.listenerCount('end'), 0)
+ stream.resume()
+ stream.push(null)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js
new file mode 100644
index 0000000000..6c10108046
--- /dev/null
+++ b/test/parallel/test-stream-duplex-from.js
@@ -0,0 +1,346 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Duplex, Readable, Writable, pipeline } = require('../../lib/ours/index')
+
+const Blob = globalThis.Blob || require('buffer').Blob
+
+{
+ const d = Duplex.from({
+ readable: new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ })
+ })
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ const d = Duplex.from(
+ new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ })
+ )
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from(
+ new Writable({
+ write(chunk, encoding, callback) {
+ ret += chunk
+ callback()
+ }
+ })
+ )
+ assert.strictEqual(d.readable, false)
+ assert.strictEqual(d.writable, true)
+ d.end('asd')
+ d.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(d.writable, false)
+ assert.strictEqual(ret, 'asd')
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from({
+ writable: new Writable({
+ write(chunk, encoding, callback) {
+ ret += chunk
+ callback()
+ }
+ })
+ })
+ assert.strictEqual(d.readable, false)
+ assert.strictEqual(d.writable, true)
+ d.end('asd')
+ d.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(d.writable, false)
+ assert.strictEqual(ret, 'asd')
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from({
+ readable: new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ }),
+ writable: new Writable({
+ write(chunk, encoding, callback) {
+ ret += chunk
+ callback()
+ }
+ })
+ })
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, true)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+ d.end('asd')
+ d.once(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(d.writable, false)
+ assert.strictEqual(ret, 'asd')
+ })
+ )
+}
+{
+ const d = Duplex.from(Promise.resolve('asd'))
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ // https://github.com/nodejs/node/issues/40497
+ pipeline(
+ ['abc\ndef\nghi'],
+ Duplex.from(async function* (source) {
+ let rest = ''
+
+ for await (const chunk of source) {
+ const lines = (rest + chunk.toString()).split('\n')
+ rest = lines.pop()
+
+ for (const line of lines) {
+ yield line
+ }
+ }
+
+ yield rest
+ }),
+ async function* (source) {
+ // eslint-disable-line require-yield
+ let ret = ''
+
+ for await (const x of source) {
+ ret += x
+ }
+
+ assert.strictEqual(ret, 'abcdefghi')
+ },
+ common.mustCall(() => {})
+ )
+} // Ensure that isDuplexNodeStream was called
+
+{
+ const duplex = new Duplex()
+ assert.strictEqual(Duplex.from(duplex), duplex)
+} // Ensure that Duplex.from works for blobs
+
+if (typeof Blob !== 'undefined') {
+ const blob = new Blob(['blob'])
+ const expectedByteLength = blob.size
+ const duplex = Duplex.from(blob)
+ duplex.on(
+ 'data',
+ common.mustCall((arrayBuffer) => {
+ assert.strictEqual(arrayBuffer.byteLength, expectedByteLength)
+ })
+ )
+} // Ensure that given a promise rejection it emits an error
+
+{
+ const myErrorMessage = 'myCustomError'
+ Duplex.from(Promise.reject(myErrorMessage)).on(
+ 'error',
+ common.mustCall((error) => {
+ assert.strictEqual(error, myErrorMessage)
+ })
+ )
+} // Ensure that given a promise rejection on an async function it emits an error
+
+{
+ const myErrorMessage = 'myCustomError'
+
+ async function asyncFn() {
+ return Promise.reject(myErrorMessage)
+ }
+
+ Duplex.from(asyncFn).on(
+ 'error',
+ common.mustCall((error) => {
+ assert.strictEqual(error, myErrorMessage)
+ })
+ )
+} // Ensure that Duplex.from throws an Invalid return value when function is void
+
+{
+ assert.throws(() => Duplex.from(() => {}), {
+ code: 'ERR_INVALID_RETURN_VALUE'
+ })
+} // Ensure data if a sub object has a readable stream it's duplexified
+
+{
+ const msg = Buffer.from('hello')
+ const duplex = Duplex.from({
+ readable: Readable({
+ read() {
+ this.push(msg)
+ this.push(null)
+ }
+ })
+ }).on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ })
+ )
+ assert.strictEqual(duplex.writable, false)
+} // Ensure data if a sub object has a writable stream it's duplexified
+
+{
+ const msg = Buffer.from('hello')
+ const duplex = Duplex.from({
+ writable: Writable({
+ write: common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ })
+ })
+ })
+ duplex.write(msg)
+ assert.strictEqual(duplex.readable, false)
+} // Ensure data if a sub object has a writable and readable stream it's duplexified
+
+{
+ const msg = Buffer.from('hello')
+ const duplex = Duplex.from({
+ readable: Readable({
+ read() {
+ this.push(msg)
+ this.push(null)
+ }
+ }),
+ writable: Writable({
+ write: common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ })
+ })
+ })
+ duplex
+ .pipe(duplex)
+ .on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ assert.strictEqual(duplex.readable, true)
+ assert.strictEqual(duplex.writable, true)
+ })
+ )
+ .on('end', common.mustCall())
+} // Ensure that given readable stream that throws an error it calls destroy
+
+{
+ const myErrorMessage = 'error!'
+ const duplex = Duplex.from(
+ Readable({
+ read() {
+ throw new Error(myErrorMessage)
+ }
+ })
+ )
+ duplex.on(
+ 'error',
+ common.mustCall((msg) => {
+ assert.strictEqual(msg.message, myErrorMessage)
+ })
+ )
+} // Ensure that given writable stream that throws an error it calls destroy
+
+{
+ const myErrorMessage = 'error!'
+ const duplex = Duplex.from(
+ Writable({
+ write(chunk, enc, cb) {
+ cb(myErrorMessage)
+ }
+ })
+ )
+ duplex.on(
+ 'error',
+ common.mustCall((msg) => {
+ assert.strictEqual(msg, myErrorMessage)
+ })
+ )
+ duplex.write('test')
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-props.js b/test/parallel/test-stream-duplex-props.js
new file mode 100644
index 0000000000..ed2528204a
--- /dev/null
+++ b/test/parallel/test-stream-duplex-props.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const { Duplex } = require('../../lib/ours/index')
+
+{
+ const d = new Duplex({
+ objectMode: true,
+ highWaterMark: 100
+ })
+ assert.strictEqual(d.writableObjectMode, true)
+ assert.strictEqual(d.writableHighWaterMark, 100)
+ assert.strictEqual(d.readableObjectMode, true)
+ assert.strictEqual(d.readableHighWaterMark, 100)
+}
+{
+ const d = new Duplex({
+ readableObjectMode: false,
+ readableHighWaterMark: 10,
+ writableObjectMode: true,
+ writableHighWaterMark: 100
+ })
+ assert.strictEqual(d.writableObjectMode, true)
+ assert.strictEqual(d.writableHighWaterMark, 100)
+ assert.strictEqual(d.readableObjectMode, false)
+ assert.strictEqual(d.readableHighWaterMark, 10)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-readable-end.js b/test/parallel/test-stream-duplex-readable-end.js
new file mode 100644
index 0000000000..1fd1e8966a
--- /dev/null
+++ b/test/parallel/test-stream-duplex-readable-end.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+} // https://github.com/nodejs/node/issues/35926
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+let loops = 5
+const src = new stream.Readable({
+ read() {
+ if (loops--) this.push(Buffer.alloc(20000))
+ }
+})
+const dst = new stream.Transform({
+ transform(chunk, output, fn) {
+ this.push(null)
+ fn()
+ }
+})
+src.pipe(dst)
+dst.on('data', () => {})
+dst.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(loops, 3)
+ assert.ok(src.isPaused())
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-readable-writable.js b/test/parallel/test-stream-duplex-readable-writable.js
new file mode 100644
index 0000000000..ba752d9298
--- /dev/null
+++ b/test/parallel/test-stream-duplex-readable-writable.js
@@ -0,0 +1,71 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Duplex } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+{
+ const duplex = new Duplex({
+ readable: false
+ })
+ assert.strictEqual(duplex.readable, false)
+ duplex.push('asd')
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PUSH_AFTER_EOF')
+ })
+ )
+ duplex.on('data', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+}
+{
+ const duplex = new Duplex({
+ writable: false,
+ write: common.mustNotCall()
+ })
+ assert.strictEqual(duplex.writable, false)
+ duplex.write('asd')
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ duplex.on('finish', common.mustNotCall())
+}
+{
+ const duplex = new Duplex({
+ readable: false
+ })
+ assert.strictEqual(duplex.readable, false)
+ duplex.on('data', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+
+ async function run() {
+ for await (const chunk of duplex) {
+ assert(false, chunk)
+ }
+ }
+
+ run().then(common.mustCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-writable-finished.js b/test/parallel/test-stream-duplex-writable-finished.js
new file mode 100644
index 0000000000..e84abe9cfb
--- /dev/null
+++ b/test/parallel/test-stream-duplex-writable-finished.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Duplex } = require('../../lib/ours/index')
+
+const assert = require('assert') // basic
+
+{
+ // Find it on Duplex.prototype
+ assert(Reflect.has(Duplex.prototype, 'writableFinished'))
+} // event
+
+{
+ const duplex = new Duplex()
+
+ duplex._write = (chunk, encoding, cb) => {
+ // The state finished should start in false.
+ assert.strictEqual(duplex.writableFinished, false)
+ cb()
+ }
+
+ duplex.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(duplex.writableFinished, true)
+ })
+ )
+ duplex.end(
+ 'testing finished state',
+ common.mustCall(() => {
+ assert.strictEqual(duplex.writableFinished, true)
+ })
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js
index c32c7553d0..0c44a8cfce 100644
--- a/test/parallel/test-stream-duplex.js
+++ b/test/parallel/test-stream-duplex.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,63 +18,59 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Duplex = require('../../').Duplex;
+const Duplex = require('../../lib/ours/index').Duplex
-var stream = new Duplex({
+const stream = new Duplex({
objectMode: true
-});
-assert(Duplex() instanceof Duplex);
-assert(stream._readableState.objectMode);
-assert(stream._writableState.objectMode);
-assert(stream.allowHalfOpen);
-assert.strictEqual(stream.listenerCount('end'), 0);
-var written;
-var read;
-
-stream._write = function (obj, _, cb) {
- written = obj;
- cb();
-};
-
-stream._read = function () {};
-
-stream.on('data', function (obj) {
- read = obj;
-});
+})
+assert(Duplex() instanceof Duplex)
+assert(stream._readableState.objectMode)
+assert(stream._writableState.objectMode)
+assert(stream.allowHalfOpen)
+assert.strictEqual(stream.listenerCount('end'), 0)
+let written
+let read
+
+stream._write = (obj, _, cb) => {
+ written = obj
+ cb()
+}
+
+stream._read = () => {}
+
+stream.on('data', (obj) => {
+ read = obj
+})
stream.push({
val: 1
-});
+})
stream.end({
val: 2
-});
-process.on('exit', function () {
- assert.strictEqual(read.val, 1);
- assert.strictEqual(written.val, 2);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+process.on('exit', () => {
+ assert.strictEqual(read.val, 1)
+ assert.strictEqual(written.val, 2)
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-end-of-streams.js b/test/parallel/test-stream-end-of-streams.js
new file mode 100644
index 0000000000..f731a4d0e8
--- /dev/null
+++ b/test/parallel/test-stream-end-of-streams.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const { Duplex, finished } = require('../../lib/ours/index')
+
+assert.throws(
+ () => {
+ // Passing empty object to mock invalid stream
+ // should throw error
+ finished({}, () => {})
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+)
+const streamObj = new Duplex()
+streamObj.end() // Below code should not throw any errors as the
+// streamObj is `Stream`
+
+finished(streamObj, () => {})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js
index 71e4460b0b..f1478d685b 100644
--- a/test/parallel/test-stream-end-paused.js
+++ b/test/parallel/test-stream-end-paused.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,53 +18,52 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-var common = require('../common');
+const silentConsole = {
+ log() {},
-var assert = require('assert/'); // Make sure we don't miss the end event for paused 0-length streams
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert') // Make sure we don't miss the end event for paused 0-length streams
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
-var stream = new Readable();
-var calledRead = false;
+const stream = new Readable()
+let calledRead = false
stream._read = function () {
- assert(!calledRead);
- calledRead = true;
- this.push(null);
-};
+ assert(!calledRead)
+ calledRead = true
+ this.push(null)
+}
stream.on('data', function () {
- throw new Error('should not ever get data');
-});
-stream.pause();
-setTimeout(common.mustCall(function () {
- stream.on('end', common.mustCall());
- stream.resume();
-}), 1);
+ throw new Error('should not ever get data')
+})
+stream.pause()
+setTimeout(
+ common.mustCall(function () {
+ stream.on('end', common.mustCall())
+ stream.resume()
+ }),
+ 1
+)
process.on('exit', function () {
- assert(calledRead);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert(calledRead)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-error-once.js b/test/parallel/test-stream-error-once.js
new file mode 100644
index 0000000000..435f9b0b9d
--- /dev/null
+++ b/test/parallel/test-stream-error-once.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable, Readable } = require('../../lib/ours/index')
+
+{
+ const writable = new Writable()
+ writable.on('error', common.mustCall())
+ writable.end()
+ writable.write('h')
+ writable.write('h')
+}
+{
+ const readable = new Readable()
+ readable.on('error', common.mustCall())
+ readable.push(null)
+ readable.push('h')
+ readable.push('h')
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-events-prepend.js b/test/parallel/test-stream-events-prepend.js
index adfe99df5c..861887c7db 100644
--- a/test/parallel/test-stream-events-prepend.js
+++ b/test/parallel/test-stream-events-prepend.js
@@ -1,94 +1,44 @@
-"use strict";
+'use strict'
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+const tap = require('tap')
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+const silentConsole = {
+ log() {},
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+ error() {}
+}
+const common = require('../common')
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+const stream = require('../../lib/ours/index')
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var stream = require('../../');
-
-var Writable =
-/*#__PURE__*/
-function (_stream$Writable) {
- _inherits(Writable, _stream$Writable);
-
- function Writable() {
- var _this;
-
- _classCallCheck(this, Writable);
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(Writable).call(this));
- _this.prependListener = undefined;
- return _this;
+class Writable extends stream.Writable {
+ constructor() {
+ super()
+ this.prependListener = undefined
}
- _createClass(Writable, [{
- key: "_write",
- value: function _write(chunk, end, cb) {
- cb();
- }
- }]);
-
- return Writable;
-}(stream.Writable);
-
-var Readable =
-/*#__PURE__*/
-function (_stream$Readable) {
- _inherits(Readable, _stream$Readable);
-
- function Readable() {
- _classCallCheck(this, Readable);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(Readable).apply(this, arguments));
+ _write(chunk, end, cb) {
+ cb()
}
+}
- _createClass(Readable, [{
- key: "_read",
- value: function _read() {
- this.push(null);
- }
- }]);
-
- return Readable;
-}(stream.Readable);
-
-var w = new Writable();
-w.on('pipe', common.mustCall());
-var r = new Readable();
-r.pipe(w);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+class Readable extends stream.Readable {
+ _read() {
+ this.push(null)
+ }
+}
+
+const w = new Writable()
+w.on('pipe', common.mustCall())
+const r = new Readable()
+r.pipe(w)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js
new file mode 100644
index 0000000000..73571bf4e5
--- /dev/null
+++ b/test/parallel/test-stream-filter.js
@@ -0,0 +1,236 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+const { once } = require('events')
+
+const st = require('timers').setTimeout
+
+function setTimeout(ms) {
+ return new Promise((resolve) => {
+ st(resolve, ms)
+ })
+}
+
+{
+ // Filter works on synchronous streams with a synchronous predicate
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => x < 3)
+ const result = [1, 2]
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Filter works on synchronous streams with an asynchronous predicate
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => {
+ await Promise.resolve()
+ return x > 3
+ })
+ const result = [4, 5]
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Map works on asynchronous streams with a asynchronous mapper
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ .map(async (x) => {
+ await Promise.resolve()
+ return x + x
+ })
+ .filter((x) => x > 5)
+ const result = [6, 8, 10]
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Filter works on an infinite stream
+ const stream = Readable.from(
+ (async function* () {
+ while (true) yield 1
+ })()
+ ).filter(
+ common.mustCall(async (x) => {
+ return x < 3
+ }, 5)
+ )
+ ;(async () => {
+ let i = 1
+
+ for await (const item of stream) {
+ assert.strictEqual(item, 1)
+ if (++i === 5) break
+ }
+ })().then(common.mustCall())
+}
+{
+ // Filter works on constructor created streams
+ let i = 0
+ const stream = new Readable({
+ read() {
+ if (i === 10) {
+ this.push(null)
+ return
+ }
+
+ this.push(Uint8Array.from([i]))
+ i++
+ },
+
+ highWaterMark: 0
+ }).filter(
+ common.mustCall(async ([x]) => {
+ return x !== 5
+ }, 10)
+ )
+ ;(async () => {
+ const result = (await stream.toArray()).map((x) => x[0])
+ const expected = [...Array(10).keys()].filter((x) => x !== 5)
+ assert.deepStrictEqual(result, expected)
+ })().then(common.mustCall())
+}
+{
+ // Throwing an error during `filter` (sync)
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => {
+ if (x === 3) {
+ throw new Error('boom')
+ }
+
+ return true
+ })
+ assert.rejects(stream.map((x) => x + x).toArray(), /boom/).then(common.mustCall())
+}
+{
+ // Throwing an error during `filter` (async)
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => {
+ if (x === 3) {
+ throw new Error('boom')
+ }
+
+ return true
+ })
+ assert.rejects(stream.filter(() => true).toArray(), /boom/).then(common.mustCall())
+}
+{
+ // Concurrency + AbortSignal
+ const ac = new AbortController()
+ let calls = 0
+ const stream = Readable.from([1, 2, 3, 4]).filter(
+ async (_, { signal }) => {
+ calls++
+ await once(signal, 'abort')
+ },
+ {
+ signal: ac.signal,
+ concurrency: 2
+ }
+ ) // pump
+
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ // nope
+ silentConsole.log(item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ setImmediate(() => {
+ ac.abort()
+ assert.strictEqual(calls, 2)
+ })
+}
+{
+ // Concurrency result order
+ const stream = Readable.from([1, 2]).filter(
+ async (item, { signal }) => {
+ await setTimeout(10 - item, {
+ signal
+ })
+ return true
+ },
+ {
+ concurrency: 2
+ }
+ )
+ ;(async () => {
+ const expected = [1, 2]
+
+ for await (const item of stream) {
+ assert.strictEqual(item, expected.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Error cases
+ assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/)
+ assert.throws(
+ () =>
+ Readable.from([1]).filter((x) => x, {
+ concurrency: 'Foo'
+ }),
+ /ERR_OUT_OF_RANGE/
+ )
+ assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/)
+}
+{
+ // Test result is a Readable
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true)
+ assert.strictEqual(stream.readable, true)
+}
+{
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall(() => {})
+ }) // Check that map isn't getting called.
+
+ stream.filter(() => true)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js
index 086962c8b1..2b9d9c400d 100644
--- a/test/parallel/test-stream-finished.js
+++ b/test/parallel/test-stream-finished.js
@@ -1,177 +1,778 @@
-"use strict";
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+const { Writable, Readable, Transform, finished, Duplex, PassThrough, Stream } = require('../../lib/ours/index')
-var common = require('../common');
+const assert = require('assert')
-var _require = require('../../'),
- Writable = _require.Writable,
- Readable = _require.Readable,
- Transform = _require.Transform,
- finished = _require.finished;
+const EE = require('events')
-var assert = require('assert/');
+const fs = require('fs')
-var fs = require('fs');
+const { promisify } = require('util')
-var promisify = require('util-promisify');
+const http = require('http')
{
- var rs = new Readable({
- read: function read() {}
- });
- finished(rs, common.mustCall(function (err) {
- assert(!err, 'no error');
- }));
- rs.push(null);
- rs.resume();
+ const rs = new Readable({
+ read() {}
+ })
+ finished(rs, common.mustSucceed())
+ rs.push(null)
+ rs.resume()
}
{
- var ws = new Writable({
- write: function write(data, enc, cb) {
- cb();
+ const ws = new Writable({
+ write(data, enc, cb) {
+ cb()
}
- });
- finished(ws, common.mustCall(function (err) {
- assert(!err, 'no error');
- }));
- ws.end();
+ })
+ finished(ws, common.mustSucceed())
+ ws.end()
}
{
- var tr = new Transform({
- transform: function transform(data, enc, cb) {
- cb();
+ const tr = new Transform({
+ transform(data, enc, cb) {
+ cb()
}
- });
- var finish = false;
- var ended = false;
- tr.on('end', function () {
- ended = true;
- });
- tr.on('finish', function () {
- finish = true;
- });
- finished(tr, common.mustCall(function (err) {
- assert(!err, 'no error');
- assert(finish);
- assert(ended);
- }));
- tr.end();
- tr.resume();
+ })
+ let finish = false
+ let ended = false
+ tr.on('end', () => {
+ ended = true
+ })
+ tr.on('finish', () => {
+ finish = true
+ })
+ finished(
+ tr,
+ common.mustSucceed(() => {
+ assert(finish)
+ assert(ended)
+ })
+ )
+ tr.end()
+ tr.resume()
}
{
- var _rs = fs.createReadStream(__filename);
-
- _rs.resume();
-
- finished(_rs, common.mustCall());
+ const rs = fs.createReadStream(__filename)
+ rs.resume()
+ finished(rs, common.mustCall())
}
{
- var finishedPromise = promisify(finished);
-
- function run() {
- return _run.apply(this, arguments);
+ const finishedPromise = promisify(finished)
+
+ async function run() {
+ const rs = fs.createReadStream(__filename)
+ const done = common.mustCall()
+ let ended = false
+ rs.resume()
+ rs.on('end', () => {
+ ended = true
+ })
+ await finishedPromise(rs)
+ assert(ended)
+ done()
}
- function _run() {
- _run = _asyncToGenerator(function* () {
- var rs = fs.createReadStream(__filename);
- var done = common.mustCall();
- var ended = false;
- rs.resume();
- rs.on('end', function () {
- ended = true;
- });
- yield finishedPromise(rs);
- assert(ended);
- done();
- });
- return _run.apply(this, arguments);
+ run()
+}
+{
+ // Check pre-cancelled
+ const signal = new EventTarget()
+ signal.aborted = true
+ const rs = Readable.from((function* () {})())
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+}
+{
+ // Check cancelled before the stream ends sync.
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from((function* () {})())
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+ ac.abort()
+}
+{
+ // Check cancelled before the stream ends async.
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from((function* () {})())
+ setTimeout(() => ac.abort(), 1)
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+}
+{
+ // Check cancelled after doesn't throw.
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from(
+ (function* () {
+ yield 5
+ setImmediate(() => ac.abort())
+ })()
+ )
+ rs.resume()
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustSucceed()
+ )
+}
+{
+ // Promisified abort works
+ const finishedPromise = promisify(finished)
+
+ async function run() {
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from((function* () {})())
+ setImmediate(() => ac.abort())
+ await finishedPromise(rs, {
+ signal
+ })
}
- run();
+ assert
+ .rejects(run, {
+ name: 'AbortError'
+ })
+ .then(common.mustCall())
}
{
- var _rs2 = fs.createReadStream('file-does-not-exist');
+ // Promisified pre-aborted works
+ const finishedPromise = promisify(finished)
+
+ async function run() {
+ const signal = new EventTarget()
+ signal.aborted = true
+ const rs = Readable.from((function* () {})())
+ await finishedPromise(rs, {
+ signal
+ })
+ }
- finished(_rs2, common.mustCall(function (err) {
- assert.strictEqual(err.code, 'ENOENT');
- }));
+ assert
+ .rejects(run, {
+ name: 'AbortError'
+ })
+ .then(common.mustCall())
}
{
- var _rs3 = new Readable();
-
- finished(_rs3, common.mustCall(function (err) {
- assert(!err, 'no error');
- }));
-
- _rs3.push(null);
-
- _rs3.emit('close'); // should not trigger an error
-
-
- _rs3.resume();
+ const rs = fs.createReadStream('file-does-not-exist')
+ finished(
+ rs,
+ common.expectsError({
+ code: 'ENOENT'
+ })
+ )
}
{
- var _rs4 = new Readable();
-
- finished(_rs4, common.mustCall(function (err) {
- assert(err, 'premature close error');
- }));
-
- _rs4.emit('close'); // should trigger error
+ const rs = new Readable()
+ finished(rs, common.mustSucceed())
+ rs.push(null)
+ rs.emit('close') // Should not trigger an error
+ rs.resume()
+}
+{
+ const rs = new Readable()
+ finished(
+ rs,
+ common.mustCall((err) => {
+ assert(err, 'premature close error')
+ })
+ )
+ rs.emit('close') // Should trigger error
+
+ rs.push(null)
+ rs.resume()
+} // Test faulty input values and options.
- _rs4.push(null);
-
- _rs4.resume();
+{
+ const rs = new Readable({
+ read() {}
+ })
+ assert.throws(() => finished(rs, 'foo'), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: /callback/
+ })
+ assert.throws(() => finished(rs, 'foo', () => {}), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: /options/
+ })
+ assert.throws(() => finished(rs, {}, 'foo'), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: /callback/
+ })
+ finished(rs, null, common.mustCall())
+ rs.push(null)
+ rs.resume()
} // Test that calling returned function removes listeners
{
- var _ws = new Writable({
- write: function write(data, env, cb) {
- cb();
+ const ws = new Writable({
+ write(data, env, cb) {
+ cb()
}
- });
-
- var removeListener = finished(_ws, common.mustNotCall());
- removeListener();
-
- _ws.end();
+ })
+ const removeListener = finished(ws, common.mustNotCall())
+ removeListener()
+ ws.end()
+}
+{
+ const rs = new Readable()
+ const removeListeners = finished(rs, common.mustNotCall())
+ removeListeners()
+ rs.emit('close')
+ rs.push(null)
+ rs.resume()
+}
+{
+ const streamLike = new EE()
+ streamLike.readableEnded = true
+ streamLike.readable = true
+ assert.throws(
+ () => {
+ finished(streamLike, () => {})
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+ )
+ streamLike.emit('close')
+}
+{
+ const writable = new Writable({
+ write() {}
+ })
+ writable.writable = false
+ writable.destroy()
+ finished(
+ writable,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+}
+{
+ const readable = new Readable()
+ readable.readable = false
+ readable.destroy()
+ finished(
+ readable,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
}
{
- var _rs5 = new Readable();
+ const w = new Writable({
+ write(chunk, encoding, callback) {
+ setImmediate(callback)
+ }
+ })
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ w.end('asd')
+ w.destroy()
+}
+
+function testClosed(factory) {
+ {
+ // If already destroyed but finished is cancelled in same tick
+ // don't invoke the callback,
+ const s = factory()
+ s.destroy()
+ const dispose = finished(s, common.mustNotCall())
+ dispose()
+ }
+ {
+ // If already destroyed invoked callback.
+ const s = factory()
+ s.destroy()
+ finished(s, common.mustCall())
+ }
+ {
+ // Don't invoke until destroy has completed.
+ let destroyed = false
+ const s = factory({
+ destroy(err, cb) {
+ setImmediate(() => {
+ destroyed = true
+ cb()
+ })
+ }
+ })
+ s.destroy()
+ finished(
+ s,
+ common.mustCall(() => {
+ assert.strictEqual(destroyed, true)
+ })
+ )
+ }
+ {
+ // Invoke callback even if close is inhibited.
+ const s = factory({
+ emitClose: false,
+
+ destroy(err, cb) {
+ cb()
+ finished(s, common.mustCall())
+ }
+ })
+ s.destroy()
+ }
+ {
+ // Invoke with deep async.
+ const s = factory({
+ destroy(err, cb) {
+ setImmediate(() => {
+ cb()
+ setImmediate(() => {
+ finished(s, common.mustCall())
+ })
+ })
+ }
+ })
+ s.destroy()
+ }
+}
- var removeListeners = finished(_rs5, common.mustNotCall());
- removeListeners();
+testClosed((opts) => new Readable({ ...opts }))
+testClosed(
+ (opts) =>
+ new Writable({
+ write() {},
- _rs5.emit('close');
+ ...opts
+ })
+)
+{
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ },
+
+ autoDestroy: false
+ })
+ w.end('asd')
+ process.nextTick(() => {
+ finished(w, common.mustCall())
+ })
+}
+{
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb(new Error())
+ },
+
+ autoDestroy: false
+ })
+ w.write('asd')
+ w.on(
+ 'error',
+ common.mustCall(() => {
+ finished(w, common.mustCall())
+ })
+ )
+}
+{
+ const r = new Readable({
+ autoDestroy: false
+ })
+ r.push(null)
+ r.resume()
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ finished(r, common.mustCall())
+ })
+ )
+}
+{
+ const rs = fs.createReadStream(__filename, {
+ autoClose: false
+ })
+ rs.resume()
+ rs.on('close', common.mustNotCall())
+ rs.on(
+ 'end',
+ common.mustCall(() => {
+ finished(rs, common.mustCall())
+ })
+ )
+}
+{
+ const d = new EE()
+ d._writableState = {}
+ d._writableState.finished = true
+ finished(
+ d,
+ {
+ readable: false,
+ writable: true
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ d._writableState.errored = true
+ d.emit('close')
+}
+{
+ const r = new Readable()
+ finished(
+ r,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ r.push('asd')
+ r.push(null)
+ r.destroy()
+}
+{
+ const d = new Duplex({
+ final(cb) {},
- _rs5.push(null);
+ // Never close writable side for test purpose
+ read() {
+ this.push(null)
+ }
+ })
+ d.on('end', common.mustCall())
+ finished(
+ d,
+ {
+ readable: true,
+ writable: false
+ },
+ common.mustCall()
+ )
+ d.end()
+ d.resume()
+}
+{
+ const d = new Duplex({
+ final(cb) {},
- _rs5.resume();
+ // Never close writable side for test purpose
+ read() {
+ this.push(null)
+ }
+ })
+ d.on('end', common.mustCall())
+ d.end()
+ finished(
+ d,
+ {
+ readable: true,
+ writable: false
+ },
+ common.mustCall()
+ )
+ d.resume()
+}
+{
+ // Test for compat for e.g. fd-slicer which implements
+ // non standard destroy behavior which might not emit
+ // 'close'.
+ const r = new Readable()
+ finished(r, common.mustCall())
+ r.resume()
+ r.push('asd')
+ r.destroyed = true
+ r.push(null)
}
-;
+{
+ // Regression https://github.com/nodejs/node/issues/33130
+ const response = new PassThrough()
+
+ class HelloWorld extends Duplex {
+ constructor(response) {
+ super({
+ autoDestroy: false
+ })
+ this.response = response
+ this.readMore = false
+ response.once('end', () => {
+ this.push(null)
+ })
+ response.on('readable', () => {
+ if (this.readMore) {
+ this._read()
+ }
+ })
+ }
-(function () {
- var t = require('tap');
+ _read() {
+ const { response } = this
+ this.readMore = true
- t.pass('sync run');
-})();
+ if (response.readableLength) {
+ this.readMore = false
+ }
-var _list = process.listeners('uncaughtException');
+ let data
-process.removeAllListeners('uncaughtException');
+ while ((data = response.read()) !== null) {
+ this.push(data)
+ }
+ }
+ }
+
+ const instance = new HelloWorld(response)
+ instance.setEncoding('utf8')
+ instance.end()
+ ;(async () => {
+ await EE.once(instance, 'finish')
+ setImmediate(() => {
+ response.write('chunk 1')
+ response.write('chunk 2')
+ response.write('chunk 3')
+ response.end()
+ })
+ let res = ''
+
+ for await (const data of instance) {
+ res += data
+ }
-_list.pop();
+ assert.strictEqual(res, 'chunk 1chunk 2chunk 3')
+ })().then(common.mustCall())
+}
+{
+ const p = new PassThrough()
+ p.end()
+ finished(p, common.mustNotCall())
+}
+{
+ const p = new PassThrough()
+ p.end()
+ p.on(
+ 'finish',
+ common.mustCall(() => {
+ finished(p, common.mustNotCall())
+ })
+ )
+}
+{
+ const server = http
+ .createServer(
+ common.mustCall((req, res) => {
+ res.on(
+ 'finish',
+ common.mustCall(() => {
+ finished(
+ res,
+ common.mustCall(() => {
+ server.close()
+ })
+ )
+ })
+ )
+ res.end()
+ })
+ )
+ .listen(0, function () {
+ http
+ .request({
+ method: 'GET',
+ port: this.address().port
+ })
+ .end()
+ .on('response', common.mustCall())
+ })
+}
+{
+ const server = http
+ .createServer(
+ common.mustCall((req, res) => {
+ req.on(
+ 'close',
+ common.mustCall(() => {
+ finished(
+ req,
+ common.mustCall(() => {
+ server.close()
+ })
+ )
+ })
+ )
+ req.destroy()
+ })
+ )
+ .listen(0, function () {
+ http
+ .request({
+ method: 'GET',
+ port: this.address().port
+ })
+ .end()
+ .on('error', common.mustCall())
+ })
+}
+{
+ const w = new Writable({
+ write(chunk, encoding, callback) {
+ process.nextTick(callback)
+ }
+ })
+ w.aborted = false
+ w.end()
+ let closed = false
+ w.on('finish', () => {
+ assert.strictEqual(closed, false)
+ w.emit('aborted')
+ })
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ closed = true
+ })
+ )
+ finished(
+ w,
+ common.mustCall(() => {
+ assert.strictEqual(closed, true)
+ })
+ )
+}
+{
+ const w = new Writable()
+
+ const _err = new Error()
+
+ w.destroy(_err)
+ assert.strictEqual(w.errored, _err)
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(_err, err)
+ assert.strictEqual(w.closed, true)
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(_err, err)
+ })
+ )
+ })
+ )
+}
+{
+ const w = new Writable()
+ w.destroy()
+ assert.strictEqual(w.errored, null)
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(w.closed, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ })
+ )
+}
+{
+ // Legacy Streams do not inherit from Readable or Writable.
+ // We cannot really assume anything about them, so we cannot close them
+ // automatically.
+ const s = new Stream()
+ finished(s, common.mustNotCall())
+}
+{
+ const server = http
+ .createServer(
+ common.mustCall(function (req, res) {
+ fs.createReadStream(__filename).pipe(res)
+ finished(
+ res,
+ common.mustCall(function (err) {
+ assert.strictEqual(err, undefined)
+ })
+ )
+ })
+ )
+ .listen(0, function () {
+ http
+ .request(
+ {
+ method: 'GET',
+ port: this.address().port
+ },
+ common.mustCall(function (res) {
+ res.resume()
+ server.close()
+ })
+ )
+ .end()
+ })
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js
new file mode 100644
index 0000000000..af775d1ba8
--- /dev/null
+++ b/test/parallel/test-stream-flatMap.js
@@ -0,0 +1,209 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const fixtures = require('../common/fixtures')
+
+const { Readable } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+const st = require('timers').setTimeout
+
+function setTimeout(ms) {
+ return new Promise((resolve) => {
+ st(resolve, ms)
+ })
+}
+
+const { createReadStream } = require('fs')
+
+function oneTo5() {
+ return Readable.from([1, 2, 3, 4, 5])
+}
+
+{
+ // flatMap works on synchronous streams with a synchronous mapper
+ ;(async () => {
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap((x) => [x + x])
+ .toArray(),
+ [2, 4, 6, 8, 10]
+ )
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap(() => [])
+ .toArray(),
+ []
+ )
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap((x) => [x, x])
+ .toArray(),
+ [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]
+ )
+ })().then(common.mustCall())
+}
+{
+ // flatMap works on sync/async streams with an asynchronous mapper
+ ;(async () => {
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap(async (x) => [x, x])
+ .toArray(),
+ [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]
+ )
+ const asyncOneTo5 = oneTo5().map(async (x) => x)
+ assert.deepStrictEqual(await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), [1, 1, 2, 2, 3, 3, 4, 4, 5, 5])
+ })().then(common.mustCall())
+}
+{
+ // flatMap works on a stream where mapping returns a stream
+ ;(async () => {
+ const result = await oneTo5()
+ .flatMap(async (x) => {
+ return Readable.from([x, x])
+ })
+ .toArray()
+ assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5])
+  })().then(common.mustCall()) // flatMap works on an objectMode stream where mapping returns a stream
+
+ ;(async () => {
+ const result = await oneTo5()
+ .flatMap(() => {
+ return createReadStream(fixtures.path('x.txt'))
+ })
+ .toArray() // The resultant stream is in object mode so toArray shouldn't flatten
+
+ assert.strictEqual(result.length, 5)
+ assert.deepStrictEqual(
+ Buffer.concat(result).toString(),
+ (process.platform === 'win32' ? 'xyz\r\n' : 'xyz\n').repeat(5)
+ )
+ })().then(common.mustCall())
+}
+{
+ // Concurrency + AbortSignal
+ const ac = new AbortController()
+ const stream = oneTo5().flatMap(
+ common.mustNotCall(async (_, { signal }) => {
+ await setTimeout(100, {
+ signal
+ })
+ }),
+ {
+ signal: ac.signal,
+ concurrency: 2
+ }
+ ) // pump
+
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ // nope
+ silentConsole.log(item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ queueMicrotask(() => {
+ ac.abort()
+ })
+}
+{
+ // Already aborted AbortSignal
+ const stream = oneTo5().flatMap(
+ common.mustNotCall(async (_, { signal }) => {
+ await setTimeout(100, {
+ signal
+ })
+ }),
+ {
+ signal: AbortSignal.abort()
+ }
+ ) // pump
+
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ // nope
+ silentConsole.log(item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+}
+{
+ // Error cases
+ assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/)
+ assert.throws(
+ () =>
+ Readable.from([1]).flatMap((x) => x, {
+ concurrency: 'Foo'
+ }),
+ /ERR_OUT_OF_RANGE/
+ )
+ assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/)
+ assert.throws(
+ () =>
+ Readable.from([1]).flatMap((x) => x, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+}
+{
+ // Test result is a Readable
+ const stream = oneTo5().flatMap((x) => x)
+ assert.strictEqual(stream.readable, true)
+}
+{
+ const stream = oneTo5()
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall(() => {})
+ }) // Check that map isn't getting called.
+
+ stream.flatMap(() => true)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js
new file mode 100644
index 0000000000..726327a51a
--- /dev/null
+++ b/test/parallel/test-stream-forEach.js
@@ -0,0 +1,209 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+const { once } = require('events')
+
+{
+ // forEach works on synchronous streams with a synchronous predicate
+ const stream = Readable.from([1, 2, 3])
+ const result = [1, 2, 3]
+ ;(async () => {
+ await stream.forEach((value) => assert.strictEqual(value, result.shift()))
+ })().then(common.mustCall())
+}
+{
+  // forEach works on asynchronous streams
+ const stream = Readable.from([1, 2, 3]).filter(async (x) => {
+ await Promise.resolve()
+ return true
+ })
+ const result = [1, 2, 3]
+ ;(async () => {
+ await stream.forEach((value) => assert.strictEqual(value, result.shift()))
+ })().then(common.mustCall())
+}
+{
+  // forEach works on asynchronous streams with an asynchronous forEach fn
+ const stream = Readable.from([1, 2, 3]).filter(async (x) => {
+ await Promise.resolve()
+ return true
+ })
+ const result = [1, 2, 3]
+ ;(async () => {
+ await stream.forEach(async (value) => {
+ await Promise.resolve()
+ assert.strictEqual(value, result.shift())
+ })
+ })().then(common.mustCall())
+}
+{
+ // forEach works on an infinite stream
+ const ac = new AbortController()
+ const { signal } = ac
+ const stream = Readable.from(
+ (async function* () {
+ while (true) yield 1
+ })(),
+ {
+ signal
+ }
+ )
+ let i = 0
+ assert
+ .rejects(
+ stream.forEach(
+ common.mustCall((x) => {
+ i++
+ if (i === 10) ac.abort()
+ assert.strictEqual(x, 1)
+ }, 10)
+ ),
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+}
+{
+ // Emitting an error during `forEach`
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ assert
+ .rejects(
+ stream.forEach(async (x) => {
+ if (x === 3) {
+ stream.emit('error', new Error('boom'))
+ }
+ }),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Throwing an error during `forEach` (sync)
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ assert
+ .rejects(
+ stream.forEach((x) => {
+ if (x === 3) {
+ throw new Error('boom')
+ }
+ }),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Throwing an error during `forEach` (async)
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ assert
+ .rejects(
+ stream.forEach(async (x) => {
+ if (x === 3) {
+ return Promise.reject(new Error('boom'))
+ }
+ }),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Concurrency + AbortSignal
+ const ac = new AbortController()
+ let calls = 0
+ const forEachPromise = Readable.from([1, 2, 3, 4]).forEach(
+ async (_, { signal }) => {
+ calls++
+ await once(signal, 'abort')
+ },
+ {
+ signal: ac.signal,
+ concurrency: 2
+ }
+ ) // pump
+
+ assert
+ .rejects(
+ async () => {
+ await forEachPromise
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ setImmediate(() => {
+ ac.abort()
+ assert.strictEqual(calls, 2)
+ })
+}
+{
+ // Error cases
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).forEach(1)
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).forEach((x) => x, {
+ concurrency: 'Foo'
+ })
+ }, /ERR_OUT_OF_RANGE/)
+ .then(common.mustCall())
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).forEach((x) => x, 1)
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+}
+{
+ // Test result is a Promise
+ const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true)
+ assert.strictEqual(typeof stream.then, 'function')
+}
+{
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall(() => {})
+ }) // Check that map isn't getting called.
+
+ stream.forEach(() => true)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-inheritance.js b/test/parallel/test-stream-inheritance.js
new file mode 100644
index 0000000000..730d9a6328
--- /dev/null
+++ b/test/parallel/test-stream-inheritance.js
@@ -0,0 +1,76 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const { Readable, Writable, Duplex, Transform } = require('../../lib/ours/index')
+
+const readable = new Readable({
+ read() {}
+})
+const writable = new Writable({
+ write() {}
+})
+const duplex = new Duplex({
+ read() {},
+
+ write() {}
+})
+const transform = new Transform({
+ transform() {}
+})
+assert.ok(readable instanceof Readable)
+assert.ok(!(writable instanceof Readable))
+assert.ok(duplex instanceof Readable)
+assert.ok(transform instanceof Readable)
+assert.ok(!(readable instanceof Writable))
+assert.ok(writable instanceof Writable)
+assert.ok(duplex instanceof Writable)
+assert.ok(transform instanceof Writable)
+assert.ok(!(readable instanceof Duplex))
+assert.ok(!(writable instanceof Duplex))
+assert.ok(duplex instanceof Duplex)
+assert.ok(transform instanceof Duplex)
+assert.ok(!(readable instanceof Transform))
+assert.ok(!(writable instanceof Transform))
+assert.ok(!(duplex instanceof Transform))
+assert.ok(transform instanceof Transform)
+assert.ok(!(null instanceof Writable))
+assert.ok(!(undefined instanceof Writable)) // Simple inheritance check for `Writable` works fine in a subclass constructor.
+
+function CustomWritable() {
+ assert.ok(this instanceof CustomWritable, `${this} does not inherit from CustomWritable`)
+ assert.ok(this instanceof Writable, `${this} does not inherit from Writable`)
+}
+
+Object.setPrototypeOf(CustomWritable, Writable)
+Object.setPrototypeOf(CustomWritable.prototype, Writable.prototype)
+new CustomWritable()
+assert.throws(CustomWritable, {
+ code: 'ERR_ASSERTION',
+ constructor: assert.AssertionError,
+ message: 'undefined does not inherit from CustomWritable'
+})
+
+class OtherCustomWritable extends Writable {}
+
+assert(!(new OtherCustomWritable() instanceof CustomWritable))
+assert(!(new CustomWritable() instanceof OtherCustomWritable))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js
index 278f19b636..7ec86d1257 100644
--- a/test/parallel/test-stream-ispaused.js
+++ b/test/parallel/test-stream-ispaused.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,45 +18,41 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-require('../common');
-
-var assert = require('assert/');
-
-var stream = require('../../');
-
-var readable = new stream.Readable(); // _read is a noop, here.
+const tap = require('tap')
-readable._read = Function(); // default state of a stream is not "paused"
+const silentConsole = {
+ log() {},
-assert.ok(!readable.isPaused()); // make the stream start flowing...
+ error() {}
+}
+require('../common')
-readable.on('data', Function()); // still not paused.
+const assert = require('assert')
-assert.ok(!readable.isPaused());
-readable.pause();
-assert.ok(readable.isPaused());
-readable.resume();
-assert.ok(!readable.isPaused());
-;
+const stream = require('../../lib/ours/index')
-(function () {
- var t = require('tap');
+const readable = new stream.Readable() // _read is a noop, here.
- t.pass('sync run');
-})();
+readable._read = Function() // Default state of a stream is not "paused"
-var _list = process.listeners('uncaughtException');
+assert.ok(!readable.isPaused()) // Make the stream start flowing...
-process.removeAllListeners('uncaughtException');
+readable.on('data', Function()) // still not paused.
-_list.pop();
+assert.ok(!readable.isPaused())
+readable.pause()
+assert.ok(readable.isPaused())
+readable.resume()
+assert.ok(!readable.isPaused())
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs
new file mode 100644
index 0000000000..9f09abeab6
--- /dev/null
+++ b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs
@@ -0,0 +1,179 @@
+import { mustCall } from '../common/index.mjs';
+import { Readable }from '../../lib/ours/index.js';
+import assert from 'assert';
+import tap from 'tap';
+
+// These tests are manually ported from the draft PR for the test262 test suite
+// Authored by Rick Waldron in https://github.com/tc39/test262/pull/2818/files
+
+// test262 license:
+// The << Software identified by reference to the Ecma Standard* ("Software)">>
+// is protected by copyright and is being made available under the
+// "BSD License", included below. This Software may be subject to third party
+// rights (rights from parties other than Ecma International), including patent
+// rights, and no licenses under such third party rights are granted under this
+// license even if the third party concerned is a member of Ecma International.
+// SEE THE ECMA CODE OF CONDUCT IN PATENT MATTERS AVAILABLE AT
+// http://www.ecma-international.org/memento/codeofconduct.htm FOR INFORMATION
+// REGARDING THE LICENSING OF PATENT CLAIMS THAT ARE REQUIRED TO IMPLEMENT ECMA
+// INTERNATIONAL STANDARDS*
+
+// Copyright (C) 2012-2013 Ecma International
+// All rights reserved.
+
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+// 1. Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// 3. Neither the name of the authors nor Ecma International may be used to
+// endorse or promote products derived from this software without specific
+// prior written permission.
+
+// THIS SOFTWARE IS PROVIDED BY THE ECMA INTERNATIONAL "AS IS" AND ANY EXPRESS
+// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
+// NO EVENT SHALL ECMA INTERNATIONAL BE LIABLE FOR ANY DIRECT, INDIRECT,
+// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
+// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// * Ecma International Standards hereafter means Ecma International Standards
+// as well as Ecma Technical Reports
+
+
+// Note all the tests that check AsyncIterator's prototype itself and things
+// that happen before stream conversion were not ported.
+{
+ // asIndexedPairs/is-function
+ assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function');
+ // asIndexedPairs/indexed-pairs.js
+ const iterator = Readable.from([0, 1]);
+ const indexedPairs = iterator.asIndexedPairs();
+
+ for await (const [i, v] of indexedPairs) {
+ assert.strictEqual(i, v);
+ }
+ // asIndexedPairs/length.js
+ assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0);
+ // asIndexedPairs/name.js
+ assert.strictEqual(Readable.prototype.asIndexedPairs.name, 'asIndexedPairs');
+ const descriptor = Object.getOwnPropertyDescriptor(
+ Readable.prototype,
+ 'asIndexedPairs'
+ );
+ assert.strictEqual(descriptor.enumerable, false);
+ assert.strictEqual(descriptor.configurable, true);
+ assert.strictEqual(descriptor.writable, true);
+}
+{
+ // drop/length
+ assert.strictEqual(Readable.prototype.drop.length, 1);
+ const descriptor = Object.getOwnPropertyDescriptor(
+ Readable.prototype,
+ 'drop'
+ );
+ assert.strictEqual(descriptor.enumerable, false);
+ assert.strictEqual(descriptor.configurable, true);
+ assert.strictEqual(descriptor.writable, true);
+ // drop/limit-equals-total
+ const iterator = Readable.from([1, 2]).drop(2);
+ const result = await iterator[Symbol.asyncIterator]().next();
+ assert.deepStrictEqual(result, { done: true, value: undefined });
+ // drop/limit-greater-than-total.js
+ const iterator2 = Readable.from([1, 2]).drop(3);
+ const result2 = await iterator2[Symbol.asyncIterator]().next();
+ assert.deepStrictEqual(result2, { done: true, value: undefined });
+ // drop/limit-less-than-total.js
+ const iterator3 = Readable.from([1, 2]).drop(1);
+ const result3 = await iterator3[Symbol.asyncIterator]().next();
+ assert.deepStrictEqual(result3, { done: false, value: 2 });
+ // drop/limit-rangeerror
+ assert.throws(() => Readable.from([1]).drop(-1), RangeError);
+ assert.throws(() => {
+ Readable.from([1]).drop({
+ valueOf() {
+ throw new Error('boom');
+ }
+ });
+ }, /boom/);
+ // drop/limit-tointeger
+ const two = await Readable.from([1, 2]).drop({ valueOf: () => 1 }).toArray();
+ assert.deepStrictEqual(two, [2]);
+ // drop/name
+ assert.strictEqual(Readable.prototype.drop.name, 'drop');
+ // drop/non-constructible
+ assert.throws(() => new Readable.prototype.drop(1), TypeError);
+ // drop/proto
+ const proto = Object.getPrototypeOf(Readable.prototype.drop);
+ assert.strictEqual(proto, Function.prototype);
+}
+{
+ // every/abrupt-iterator-close
+ const stream = Readable.from([1, 2, 3]);
+ const e = new Error();
+ await assert.rejects(stream.every(mustCall(() => {
+ throw e;
+ }, 1)), e);
+}
+{
+ // every/callable-fn
+ await assert.rejects(Readable.from([1, 2]).every({}), TypeError);
+}
+{
+ // every/callable
+ Readable.prototype.every.call(Readable.from([]), () => {});
+ // eslint-disable-next-line array-callback-return
+ Readable.from([]).every(() => {});
+ assert.throws(() => {
+ const r = Readable.from([]);
+ new r.every(() => {});
+ }, TypeError);
+}
+
+{
+ // every/false
+ const iterator = Readable.from([1, 2, 3]);
+ const result = await iterator.every((v) => v === 1);
+ assert.strictEqual(result, false);
+}
+{
+ // every/every
+ const iterator = Readable.from([1, 2, 3]);
+ const result = await iterator.every((v) => true);
+ assert.strictEqual(result, true);
+}
+
+{
+ // every/is-function
+ assert.strictEqual(typeof Readable.prototype.every, 'function');
+}
+{
+ // every/length
+ assert.strictEqual(Readable.prototype.every.length, 1);
+ // every/name
+ assert.strictEqual(Readable.prototype.every.name, 'every');
+ // every/propdesc
+ const descriptor = Object.getOwnPropertyDescriptor(
+ Readable.prototype,
+ 'every'
+ );
+ assert.strictEqual(descriptor.enumerable, false);
+ assert.strictEqual(descriptor.configurable, true);
+ assert.strictEqual(descriptor.writable, true);
+}
+
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ if(code === 0) {
+ tap.pass('test succeeded');
+ } else {
+ tap.fail(`test failed - exited code ${code}`);
+ }
+ });
+ /* replacement end */
diff --git a/test/parallel/test-stream-objectmode-undefined.js b/test/parallel/test-stream-objectmode-undefined.js
index 0a13eb1e98..9dac7a3973 100644
--- a/test/parallel/test-stream-objectmode-undefined.js
+++ b/test/parallel/test-stream-objectmode-undefined.js
@@ -1,69 +1,64 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable,
- Transform = _require.Transform;
+const { Readable, Writable, Transform } = require('../../lib/ours/index')
{
- var stream = new Readable({
+ const stream = new Readable({
objectMode: true,
- read: common.mustCall(function () {
- stream.push(undefined);
- stream.push(null);
+ read: common.mustCall(() => {
+ stream.push(undefined)
+ stream.push(null)
+ })
+ })
+ stream.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, undefined)
})
- });
- stream.on('data', common.mustCall(function (chunk) {
- assert.strictEqual(chunk, undefined);
- }));
+ )
}
{
- var _stream = new Writable({
+ const stream = new Writable({
objectMode: true,
- write: common.mustCall(function (chunk) {
- assert.strictEqual(chunk, undefined);
+ write: common.mustCall((chunk) => {
+ assert.strictEqual(chunk, undefined)
})
- });
-
- _stream.write(undefined);
+ })
+ stream.write(undefined)
}
{
- var _stream2 = new Transform({
+ const stream = new Transform({
objectMode: true,
- transform: common.mustCall(function (chunk) {
- _stream2.push(chunk);
+ transform: common.mustCall((chunk) => {
+ stream.push(chunk)
})
- });
-
- _stream2.on('data', common.mustCall(function (chunk) {
- assert.strictEqual(chunk, undefined);
- }));
-
- _stream2.write(undefined);
+ })
+ stream.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, undefined)
+ })
+ )
+ stream.write(undefined)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-once-readable-pipe.js b/test/parallel/test-stream-once-readable-pipe.js
index 1cdea29f0a..1f80f3455c 100644
--- a/test/parallel/test-stream-once-readable-pipe.js
+++ b/test/parallel/test-stream-once-readable-pipe.js
@@ -1,88 +1,76 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable; // This test ensures that if have 'readable' listener
+const { Readable, Writable } = require('../../lib/ours/index') // This test ensures that if we have a 'readable' listener
// on Readable instance it will not disrupt the pipe.
-
{
- var receivedData = '';
- var w = new Writable({
- write: function write(chunk, env, callback) {
- receivedData += chunk;
- callback();
+ let receivedData = ''
+ const w = new Writable({
+ write: (chunk, env, callback) => {
+ receivedData += chunk
+ callback()
}
- });
- var data = ['foo', 'bar', 'baz'];
- var r = new Readable({
- read: function read() {}
- });
- r.once('readable', common.mustCall());
- r.pipe(w);
- r.push(data[0]);
- r.push(data[1]);
- r.push(data[2]);
- r.push(null);
- w.on('finish', common.mustCall(function () {
- assert.strictEqual(receivedData, data.join(''));
- }));
+ })
+ const data = ['foo', 'bar', 'baz']
+ const r = new Readable({
+ read: () => {}
+ })
+ r.once('readable', common.mustCall())
+ r.pipe(w)
+ r.push(data[0])
+ r.push(data[1])
+ r.push(data[2])
+ r.push(null)
+ w.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(receivedData, data.join(''))
+ })
+ )
}
{
- var _receivedData = '';
-
- var _w = new Writable({
- write: function write(chunk, env, callback) {
- _receivedData += chunk;
- callback();
+ let receivedData = ''
+ const w = new Writable({
+ write: (chunk, env, callback) => {
+ receivedData += chunk
+ callback()
}
- });
-
- var _data = ['foo', 'bar', 'baz'];
-
- var _r = new Readable({
- read: function read() {}
- });
-
- _r.pipe(_w);
-
- _r.push(_data[0]);
-
- _r.push(_data[1]);
-
- _r.push(_data[2]);
-
- _r.push(null);
-
- _r.once('readable', common.mustCall());
-
- _w.on('finish', common.mustCall(function () {
- assert.strictEqual(_receivedData, _data.join(''));
- }));
+ })
+ const data = ['foo', 'bar', 'baz']
+ const r = new Readable({
+ read: () => {}
+ })
+ r.pipe(w)
+ r.push(data[0])
+ r.push(data[1])
+ r.push(data[2])
+ r.push(null)
+ r.once('readable', common.mustCall())
+ w.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(receivedData, data.join(''))
+ })
+ )
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-passthrough-drain.js b/test/parallel/test-stream-passthrough-drain.js
new file mode 100644
index 0000000000..6b1813c257
--- /dev/null
+++ b/test/parallel/test-stream-passthrough-drain.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { PassThrough } = require('../../lib/ours/index')
+
+const pt = new PassThrough({
+ highWaterMark: 0
+})
+pt.on('drain', common.mustCall())
+pt.write('hello')
+pt.read()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js
index 5e39fce11d..0825d5a0e4 100644
--- a/test/parallel/test-stream-pipe-after-end.js
+++ b/test/parallel/test-stream-pipe-after-end.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,102 +18,70 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
+const tap = require('tap')
-var Readable = require('../../lib/_stream_readable');
+const silentConsole = {
+ log() {},
-var Writable = require('../../lib/_stream_writable');
+ error() {}
+}
+const common = require('../common')
-var TestReadable =
-/*#__PURE__*/
-function (_Readable) {
- _inherits(TestReadable, _Readable);
+const assert = require('assert')
- function TestReadable(opt) {
- var _this;
+const { Readable, Writable } = require('../../lib/ours/index')
- _classCallCheck(this, TestReadable);
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReadable).call(this, opt));
- _this._ended = false;
- return _this;
+class TestReadable extends Readable {
+ constructor(opt) {
+ super(opt)
+ this._ended = false
}
- _createClass(TestReadable, [{
- key: "_read",
- value: function _read() {
- if (this._ended) this.emit('error', new Error('_read called twice'));
- this._ended = true;
- this.push(null);
- }
- }]);
-
- return TestReadable;
-}(Readable);
-
-var TestWritable =
-/*#__PURE__*/
-function (_Writable) {
- _inherits(TestWritable, _Writable);
-
- function TestWritable(opt) {
- var _this2;
-
- _classCallCheck(this, TestWritable);
-
- _this2 = _possibleConstructorReturn(this, _getPrototypeOf(TestWritable).call(this, opt));
- _this2._written = [];
- return _this2;
+ _read() {
+ if (this._ended) this.emit('error', new Error('_read called twice'))
+ this._ended = true
+ this.push(null)
}
+}
- _createClass(TestWritable, [{
- key: "_write",
- value: function _write(chunk, encoding, cb) {
- this._written.push(chunk);
-
- cb();
- }
- }]);
-
- return TestWritable;
-}(Writable); // this one should not emit 'end' until we read() from it later.
-
-
-var ender = new TestReadable(); // what happens when you pipe() a Readable that's already ended?
-
-var piper = new TestReadable(); // pushes EOF null, and length=0, so this will trigger 'end'
-
-piper.read();
-setTimeout(common.mustCall(function () {
- ender.on('end', common.mustCall());
- var c = ender.read();
- assert.strictEqual(c, null);
- var w = new TestWritable();
- w.on('finish', common.mustCall());
- piper.pipe(w);
-}), 1);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
+class TestWritable extends Writable {
+ constructor(opt) {
+ super(opt)
+ this._written = []
+ }
-_list.pop();
+ _write(chunk, encoding, cb) {
+ this._written.push(chunk)
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ cb()
+ }
+} // This one should not emit 'end' until we read() from it later.
+
+const ender = new TestReadable() // What happens when you pipe() a Readable that's already ended?
+
+const piper = new TestReadable() // pushes EOF null, and length=0, so this will trigger 'end'
+
+piper.read()
+setTimeout(
+ common.mustCall(function () {
+ ender.on('end', common.mustCall())
+ const c = ender.read()
+ assert.strictEqual(c, null)
+ const w = new TestWritable()
+ w.on('finish', common.mustCall())
+ piper.pipe(w)
+ }),
+ 1
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js
index aacd62cdf7..9546d06a70 100644
--- a/test/parallel/test-stream-pipe-await-drain-manual-resume.js
+++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js
@@ -1,100 +1,101 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/'); // A consumer stream with a very low highWaterMark, which starts in a state
+const assert = require('assert') // A consumer stream with a very low highWaterMark, which starts in a state
// where it buffers the chunk it receives rather than indicating that they
// have been consumed.
-
-var writable = new stream.Writable({
+const writable = new stream.Writable({
highWaterMark: 5
-});
-var isCurrentlyBufferingWrites = true;
-var queue = [];
-
-writable._write = function (chunk, encoding, cb) {
- if (isCurrentlyBufferingWrites) queue.push({
- chunk: chunk,
- cb: cb
- });else cb();
-};
-
-var readable = new stream.Readable({
- read: function read() {}
-});
-readable.pipe(writable);
-readable.once('pause', common.mustCall(function () {
- assert.strictEqual(readable._readableState.awaitDrain, 1, 'Expected awaitDrain to equal 1 but instead got ' + "".concat(readable._readableState.awaitDrain)); // First pause, resume manually. The next write() to writable will still
- // return false, because chunks are still being buffered, so it will increase
- // the awaitDrain counter again.
-
- process.nextTick(common.mustCall(function () {
- readable.resume();
- }));
- readable.once('pause', common.mustCall(function () {
- assert.strictEqual(readable._readableState.awaitDrain, 1, '.resume() should not reset the counter but instead got ' + "".concat(readable._readableState.awaitDrain)); // Second pause, handle all chunks from now on. Once all callbacks that
- // are currently queued up are handled, the awaitDrain drain counter should
- // fall back to 0 and all chunks that are pending on the readable side
- // should be flushed.
-
- isCurrentlyBufferingWrites = false;
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = queue[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var queued = _step.value;
- queued.cb();
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
- }));
-}));
-readable.push(bufferShim.alloc(100)); // Fill the writable HWM, first 'pause'.
-
-readable.push(bufferShim.alloc(100)); // Second 'pause'.
-
-readable.push(bufferShim.alloc(100)); // Should get through to the writable.
-
-readable.push(null);
-writable.on('finish', common.mustCall(function () {
- assert.strictEqual(readable._readableState.awaitDrain, 0, 'awaitDrain should equal 0 after all chunks are written but instead got' + "".concat(readable._readableState.awaitDrain)); // Everything okay, all chunks were written.
-}));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+let isCurrentlyBufferingWrites = true
+const queue = []
+
+writable._write = (chunk, encoding, cb) => {
+ if (isCurrentlyBufferingWrites)
+ queue.push({
+ chunk,
+ cb
+ })
+ else cb()
+}
+
+const readable = new stream.Readable({
+ read() {}
+})
+readable.pipe(writable)
+readable.once(
+ 'pause',
+ common.mustCall(() => {
+ assert.strictEqual(
+ readable._readableState.awaitDrainWriters,
+ writable,
+ 'Expected awaitDrainWriters to be a Writable but instead got ' + `${readable._readableState.awaitDrainWriters}`
+ ) // First pause, resume manually. The next write() to writable will still
+ // return false, because chunks are still being buffered, so it will increase
+ // the awaitDrain counter again.
+
+ process.nextTick(
+ common.mustCall(() => {
+ readable.resume()
+ })
+ )
+ readable.once(
+ 'pause',
+ common.mustCall(() => {
+ assert.strictEqual(
+ readable._readableState.awaitDrainWriters,
+ writable,
+ '.resume() should not reset the awaitDrainWriters, but instead got ' +
+ `${readable._readableState.awaitDrainWriters}`
+ ) // Second pause, handle all chunks from now on. Once all callbacks that
+ // are currently queued up are handled, the awaitDrain drain counter should
+ // fall back to 0 and all chunks that are pending on the readable side
+ // should be flushed.
+
+ isCurrentlyBufferingWrites = false
+
+ for (const queued of queue) queued.cb()
+ })
+ )
+ })
+)
+readable.push(Buffer.alloc(100)) // Fill the writable HWM, first 'pause'.
+
+readable.push(Buffer.alloc(100)) // Second 'pause'.
+
+readable.push(Buffer.alloc(100)) // Should get through to the writable.
+
+readable.push(null)
+writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(
+ readable._readableState.awaitDrainWriters,
+ null,
+ `awaitDrainWriters should be reset to null
+ after all chunks are written but instead got
+ ${readable._readableState.awaitDrainWriters}`
+ ) // Everything okay, all chunks were written.
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js
index 9ee200a899..85898e2bdd 100644
--- a/test/parallel/test-stream-pipe-await-drain-push-while-write.js
+++ b/test/parallel/test-stream-pipe-await-drain-push-while-write.js
@@ -1,59 +1,54 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
-var writable = new stream.Writable({
+const writable = new stream.Writable({
write: common.mustCall(function (chunk, encoding, cb) {
- assert.strictEqual(readable._readableState.awaitDrain, 0);
+ assert.strictEqual(readable._readableState.awaitDrainWriters, null)
if (chunk.length === 32 * 1024) {
// first chunk
- readable.push(bufferShim.alloc(34 * 1024)); // above hwm
+ readable.push(Buffer.alloc(34 * 1024)) // above hwm
// We should check if awaitDrain counter is increased in the next
// tick, because awaitDrain is incremented after this method finished
- process.nextTick(function () {
- assert.strictEqual(readable._readableState.awaitDrain, 1);
- });
+ process.nextTick(() => {
+ assert.strictEqual(readable._readableState.awaitDrainWriters, writable)
+ })
}
- cb();
+ process.nextTick(cb)
}, 3)
-}); // A readable stream which produces two buffers.
+}) // A readable stream which produces two buffers.
-var bufs = [bufferShim.alloc(32 * 1024), bufferShim.alloc(33 * 1024)]; // above hwm
+const bufs = [Buffer.alloc(32 * 1024), Buffer.alloc(33 * 1024)] // above hwm
-var readable = new stream.Readable({
- read: function read() {
+const readable = new stream.Readable({
+ read: function () {
while (bufs.length > 0) {
- this.push(bufs.shift());
+ this.push(bufs.shift())
}
}
-});
-readable.pipe(writable);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+readable.pipe(writable)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js
index f63ae4eeec..48945d621d 100644
--- a/test/parallel/test-stream-pipe-await-drain.js
+++ b/test/parallel/test-stream-pipe-await-drain.js
@@ -1,69 +1,75 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/'); // This is very similar to test-stream-pipe-cleanup-pause.js.
+const assert = require('assert') // This is very similar to test-stream-pipe-cleanup-pause.js.
-
-var reader = new stream.Readable();
-var writer1 = new stream.Writable();
-var writer2 = new stream.Writable();
-var writer3 = new stream.Writable(); // 560000 is chosen here because it is larger than the (default) highWaterMark
+const reader = new stream.Readable()
+const writer1 = new stream.Writable()
+const writer2 = new stream.Writable()
+const writer3 = new stream.Writable() // 560000 is chosen here because it is larger than the (default) highWaterMark
// and will cause `.write()` to return false
// See: https://github.com/nodejs/node/issues/5820
-var buffer = bufferShim.allocUnsafe(560000);
+const buffer = Buffer.allocUnsafe(560000)
-reader._read = function () {};
+reader._read = () => {}
writer1._write = common.mustCall(function (chunk, encoding, cb) {
- this.emit('chunk-received');
- cb();
-}, 1);
-writer1.once('chunk-received', function () {
- assert.strictEqual(reader._readableState.awaitDrain, 0, 'awaitDrain initial value should be 0, actual is ' + reader._readableState.awaitDrain);
- setImmediate(function () {
+ this.emit('chunk-received')
+ process.nextTick(cb)
+}, 1)
+writer1.once('chunk-received', () => {
+ assert.strictEqual(
+ reader._readableState.awaitDrainWriters.size,
+ 0,
+ 'awaitDrain initial value should be 0, actual is ' + reader._readableState.awaitDrainWriters.size
+ )
+ setImmediate(() => {
// This one should *not* get through to writer1 because writer2 is not
// "done" processing.
- reader.push(buffer);
- });
-}); // A "slow" consumer:
-
-writer2._write = common.mustCall(function (chunk, encoding, cb) {
- assert.strictEqual(reader._readableState.awaitDrain, 1, 'awaitDrain should be 1 after first push, actual is ' + reader._readableState.awaitDrain); // Not calling cb here to "simulate" slow stream.
+ reader.push(buffer)
+ })
+}) // A "slow" consumer:
+
+writer2._write = common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(
+ reader._readableState.awaitDrainWriters.size,
+ 1,
+ 'awaitDrain should be 1 after first push, actual is ' + reader._readableState.awaitDrainWriters.size
+ ) // Not calling cb here to "simulate" slow stream.
// This should be called exactly once, since the first .write() call
// will return false.
-}, 1);
-writer3._write = common.mustCall(function (chunk, encoding, cb) {
- assert.strictEqual(reader._readableState.awaitDrain, 2, 'awaitDrain should be 2 after second push, actual is ' + reader._readableState.awaitDrain); // Not calling cb here to "simulate" slow stream.
+}, 1)
+writer3._write = common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(
+ reader._readableState.awaitDrainWriters.size,
+ 2,
+ 'awaitDrain should be 2 after second push, actual is ' + reader._readableState.awaitDrainWriters.size
+ ) // Not calling cb here to "simulate" slow stream.
// This should be called exactly once, since the first .write() call
// will return false.
-}, 1);
-reader.pipe(writer1);
-reader.pipe(writer2);
-reader.pipe(writer3);
-reader.push(buffer);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+}, 1)
+reader.pipe(writer1)
+reader.pipe(writer2)
+reader.pipe(writer3)
+reader.push(buffer)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js
index 8f1e11df82..c924500798 100644
--- a/test/parallel/test-stream-pipe-cleanup-pause.js
+++ b/test/parallel/test-stream-pipe-cleanup-pause.js
@@ -1,58 +1,53 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var reader = new stream.Readable();
-var writer1 = new stream.Writable();
-var writer2 = new stream.Writable(); // 560000 is chosen here because it is larger than the (default) highWaterMark
+const reader = new stream.Readable()
+const writer1 = new stream.Writable()
+const writer2 = new stream.Writable() // 560000 is chosen here because it is larger than the (default) highWaterMark
// and will cause `.write()` to return false
// See: https://github.com/nodejs/node/issues/2323
-var buffer = bufferShim.allocUnsafe(560000);
+const buffer = Buffer.allocUnsafe(560000)
-reader._read = function () {};
+reader._read = () => {}
writer1._write = common.mustCall(function (chunk, encoding, cb) {
- this.emit('chunk-received');
- cb();
-}, 1);
+ this.emit('chunk-received')
+ cb()
+}, 1)
writer1.once('chunk-received', function () {
- reader.unpipe(writer1);
- reader.pipe(writer2);
- reader.push(buffer);
+ reader.unpipe(writer1)
+ reader.pipe(writer2)
+ reader.push(buffer)
setImmediate(function () {
- reader.push(buffer);
+ reader.push(buffer)
setImmediate(function () {
- reader.push(buffer);
- });
- });
-});
+ reader.push(buffer)
+ })
+ })
+})
writer2._write = common.mustCall(function (chunk, encoding, cb) {
- cb();
-}, 3);
-reader.pipe(writer1);
-reader.push(buffer);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ cb()
+}, 3)
+reader.pipe(writer1)
+reader.push(buffer)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js
index a761f38c2e..7fa1422758 100644
--- a/test/parallel/test-stream-pipe-cleanup.js
+++ b/test/parallel/test-stream-pipe-cleanup.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,143 +18,134 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-// This test asserts that Stream.prototype.pipe does not leave listeners
-// hanging on the source or dest.
-
-
-require('../common');
-
-var stream = require('../../');
-
-var assert = require('assert/');
-
-(function () {
- if (/^v0\.8\./.test(process.version)) return;
-
- function Writable() {
- this.writable = true;
- this.endCalls = 0;
-
- require('stream').Stream.call(this);
- }
+const tap = require('tap')
- Object.setPrototypeOf(Writable.prototype, require('stream').Stream.prototype);
- Object.setPrototypeOf(Writable, require('stream').Stream);
+const silentConsole = {
+ log() {},
- Writable.prototype.end = function () {
- this.endCalls++;
- };
-
- Writable.prototype.destroy = function () {
- this.endCalls++;
- };
-
- function Readable() {
- this.readable = true;
-
- require('stream').Stream.call(this);
- }
-
- Object.setPrototypeOf(Readable.prototype, require('stream').Stream.prototype);
- Object.setPrototypeOf(Readable, require('stream').Stream);
-
- function Duplex() {
- this.readable = true;
- Writable.call(this);
- }
-
- Object.setPrototypeOf(Duplex.prototype, Writable.prototype);
- Object.setPrototypeOf(Duplex, Writable);
- var i = 0;
- var limit = 100;
- var w = new Writable();
- var r;
-
- for (i = 0; i < limit; i++) {
- r = new Readable();
- r.pipe(w);
- r.emit('end');
- }
+ error() {}
+} // This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
- assert.strictEqual(r.listeners('end').length, 0);
- assert.strictEqual(w.endCalls, limit);
- w.endCalls = 0;
+require('../common')
- for (i = 0; i < limit; i++) {
- r = new Readable();
- r.pipe(w);
- r.emit('close');
- }
+const stream = require('../../lib/ours/index')
- assert.strictEqual(r.listeners('close').length, 0);
- assert.strictEqual(w.endCalls, limit);
- w.endCalls = 0;
- r = new Readable();
+const assert = require('assert')
- for (i = 0; i < limit; i++) {
- w = new Writable();
- r.pipe(w);
- w.emit('close');
+function Writable() {
+ this.writable = true
+ this.endCalls = 0
+ stream.Stream.call(this)
+}
+
+Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Writable, stream.Stream)
+
+Writable.prototype.end = function () {
+ this.endCalls++
+}
+
+Writable.prototype.destroy = function () {
+ this.endCalls++
+}
+
+function Readable() {
+ this.readable = true
+ stream.Stream.call(this)
+}
+
+Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Readable, stream.Stream)
+
+function Duplex() {
+ this.readable = true
+ Writable.call(this)
+}
+
+Object.setPrototypeOf(Duplex.prototype, Writable.prototype)
+Object.setPrototypeOf(Duplex, Writable)
+let i = 0
+const limit = 100
+let w = new Writable()
+let r
+
+for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('end')
+}
+
+assert.strictEqual(r.listeners('end').length, 0)
+assert.strictEqual(w.endCalls, limit)
+w.endCalls = 0
+
+for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('close')
+}
+
+assert.strictEqual(r.listeners('close').length, 0)
+assert.strictEqual(w.endCalls, limit)
+w.endCalls = 0
+r = new Readable()
+
+for (i = 0; i < limit; i++) {
+ w = new Writable()
+ r.pipe(w)
+ w.emit('close')
+}
+
+assert.strictEqual(w.listeners('close').length, 0)
+r = new Readable()
+w = new Writable()
+const d = new Duplex()
+r.pipe(d) // pipeline A
+
+d.pipe(w) // pipeline B
+
+assert.strictEqual(r.listeners('end').length, 2) // A.onend, A.cleanup
+
+assert.strictEqual(r.listeners('close').length, 2) // A.onclose, A.cleanup
+
+assert.strictEqual(d.listeners('end').length, 2) // B.onend, B.cleanup
+// A.cleanup, B.onclose, B.cleanup
+
+assert.strictEqual(d.listeners('close').length, 3)
+assert.strictEqual(w.listeners('end').length, 0)
+assert.strictEqual(w.listeners('close').length, 1) // B.cleanup
+
+r.emit('end')
+assert.strictEqual(d.endCalls, 1)
+assert.strictEqual(w.endCalls, 0)
+assert.strictEqual(r.listeners('end').length, 0)
+assert.strictEqual(r.listeners('close').length, 0)
+assert.strictEqual(d.listeners('end').length, 2) // B.onend, B.cleanup
+
+assert.strictEqual(d.listeners('close').length, 2) // B.onclose, B.cleanup
+
+assert.strictEqual(w.listeners('end').length, 0)
+assert.strictEqual(w.listeners('close').length, 1) // B.cleanup
+
+d.emit('end')
+assert.strictEqual(d.endCalls, 1)
+assert.strictEqual(w.endCalls, 1)
+assert.strictEqual(r.listeners('end').length, 0)
+assert.strictEqual(r.listeners('close').length, 0)
+assert.strictEqual(d.listeners('end').length, 0)
+assert.strictEqual(d.listeners('close').length, 0)
+assert.strictEqual(w.listeners('end').length, 0)
+assert.strictEqual(w.listeners('close').length, 0)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
}
-
- assert.strictEqual(w.listeners('close').length, 0);
- r = new Readable();
- w = new Writable();
- var d = new Duplex();
- r.pipe(d); // pipeline A
-
- d.pipe(w); // pipeline B
-
- assert.strictEqual(r.listeners('end').length, 2); // A.onend, A.cleanup
-
- assert.strictEqual(r.listeners('close').length, 2); // A.onclose, A.cleanup
-
- assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup
- // A.cleanup, B.onclose, B.cleanup
-
- assert.strictEqual(d.listeners('close').length, 3);
- assert.strictEqual(w.listeners('end').length, 0);
- assert.strictEqual(w.listeners('close').length, 1); // B.cleanup
-
- r.emit('end');
- assert.strictEqual(d.endCalls, 1);
- assert.strictEqual(w.endCalls, 0);
- assert.strictEqual(r.listeners('end').length, 0);
- assert.strictEqual(r.listeners('close').length, 0);
- assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup
-
- assert.strictEqual(d.listeners('close').length, 2); // B.onclose, B.cleanup
-
- assert.strictEqual(w.listeners('end').length, 0);
- assert.strictEqual(w.listeners('close').length, 1); // B.cleanup
-
- d.emit('end');
- assert.strictEqual(d.endCalls, 1);
- assert.strictEqual(w.endCalls, 1);
- assert.strictEqual(r.listeners('end').length, 0);
- assert.strictEqual(r.listeners('close').length, 0);
- assert.strictEqual(d.listeners('end').length, 0);
- assert.strictEqual(d.listeners('close').length, 0);
- assert.strictEqual(w.listeners('end').length, 0);
- assert.strictEqual(w.listeners('close').length, 0);
-})();
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js
index b86a4d01fb..3c7b1a2a1a 100644
--- a/test/parallel/test-stream-pipe-error-handling.js
+++ b/test/parallel/test-stream-pipe-error-handling.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,117 +18,123 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Stream = require('stream').Stream;
+const { Stream, PassThrough } = require('../../lib/ours/index')
{
- var source = new Stream();
- var dest = new Stream();
- source.pipe(dest);
- var gotErr = null;
+ const source = new Stream()
+ const dest = new Stream()
+ source.pipe(dest)
+ let gotErr = null
source.on('error', function (err) {
- gotErr = err;
- });
- var err = new Error('This stream turned into bacon.');
- source.emit('error', err);
- assert.strictEqual(gotErr, err);
+ gotErr = err
+ })
+ const err = new Error('This stream turned into bacon.')
+ source.emit('error', err)
+ assert.strictEqual(gotErr, err)
}
{
- var _source = new Stream();
-
- var _dest = new Stream();
-
- _source.pipe(_dest);
-
- var _err = new Error('This stream turned into bacon.');
-
- var _gotErr = null;
+ const source = new Stream()
+ const dest = new Stream()
+ source.pipe(dest)
+ const err = new Error('This stream turned into bacon.')
+ let gotErr = null
try {
- _source.emit('error', _err);
+ source.emit('error', err)
} catch (e) {
- _gotErr = e;
+ gotErr = e
}
- assert.strictEqual(_gotErr, _err);
+ assert.strictEqual(gotErr, err)
}
{
- var R = require('../../').Readable;
-
- var W = require('../../').Writable;
-
- var r = new R();
- var w = new W();
- var removed = false;
+ const R = Stream.Readable
+ const W = Stream.Writable
+ const r = new R({
+ autoDestroy: false
+ })
+ const w = new W({
+ autoDestroy: false
+ })
+ let removed = false
r._read = common.mustCall(function () {
- setTimeout(common.mustCall(function () {
- assert(removed);
- assert.throws(function () {
- w.emit('error', new Error('fail'));
- }, /^Error: fail$/);
- }), 1);
- });
- w.on('error', myOnError);
- r.pipe(w);
- w.removeListener('error', myOnError);
- removed = true;
+ setTimeout(
+ common.mustCall(function () {
+ assert(removed)
+ assert.throws(function () {
+ w.emit('error', new Error('fail'))
+ }, /^Error: fail$/)
+ }),
+ 1
+ )
+ })
+ w.on('error', myOnError)
+ r.pipe(w)
+ w.removeListener('error', myOnError)
+ removed = true
function myOnError() {
- throw new Error('this should not happen');
+ throw new Error('this should not happen')
}
}
{
- var _R = require('../../').Readable;
-
- var _W = require('../../').Writable;
-
- var _r = new _R();
-
- var _w = new _W();
-
- var _removed = false;
- _r._read = common.mustCall(function () {
- setTimeout(common.mustCall(function () {
- assert(_removed);
-
- _w.emit('error', new Error('fail'));
- }), 1);
- });
-
- _w.on('error', common.mustCall());
-
- _w._write = function () {};
-
- _r.pipe(_w); // Removing some OTHER random listener should not do anything
-
-
- _w.removeListener('error', function () {});
-
- _removed = true;
+ const R = Stream.Readable
+ const W = Stream.Writable
+ const r = new R()
+ const w = new W()
+ let removed = false
+ r._read = common.mustCall(function () {
+ setTimeout(
+ common.mustCall(function () {
+ assert(removed)
+ w.emit('error', new Error('fail'))
+ }),
+ 1
+ )
+ })
+ w.on('error', common.mustCall())
+
+ w._write = () => {}
+
+ r.pipe(w) // Removing some OTHER random listener should not do anything
+
+ w.removeListener('error', () => {})
+ removed = true
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ const _err = new Error('this should be handled')
+
+ const destination = new PassThrough()
+ destination.once(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ const stream = new Stream()
+ stream.pipe(destination)
+ destination.destroy(_err)
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-error-unhandled.js b/test/parallel/test-stream-pipe-error-unhandled.js
new file mode 100644
index 0000000000..557ed6b39b
--- /dev/null
+++ b/test/parallel/test-stream-pipe-error-unhandled.js
@@ -0,0 +1,43 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable, Writable } = require('../../lib/ours/index')
+
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'asd')
+ })
+)
+const r = new Readable({
+ read() {
+ this.push('asd')
+ }
+})
+const w = new Writable({
+ autoDestroy: true,
+
+ write() {}
+})
+r.pipe(w)
+w.destroy(new Error('asd'))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js
index 43d0ab0971..e997c9df4c 100644
--- a/test/parallel/test-stream-pipe-event.js
+++ b/test/parallel/test-stream-pipe-event.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,57 +18,51 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
function Writable() {
- this.writable = true;
-
- require('stream').Stream.call(this);
+ this.writable = true
+ stream.Stream.call(this)
}
-Object.setPrototypeOf(Writable.prototype, require('stream').Stream.prototype);
-Object.setPrototypeOf(Writable, require('stream').Stream);
+Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Writable, stream.Stream)
function Readable() {
- this.readable = true;
-
- require('stream').Stream.call(this);
+ this.readable = true
+ stream.Stream.call(this)
}
-Object.setPrototypeOf(Readable.prototype, require('stream').Stream.prototype);
-Object.setPrototypeOf(Readable, require('stream').Stream);
-var passed = false;
-var w = new Writable();
+Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Readable, stream.Stream)
+let passed = false
+const w = new Writable()
w.on('pipe', function (src) {
- passed = true;
-});
-var r = new Readable();
-r.pipe(w);
-assert.ok(passed);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ passed = true
+})
+const r = new Readable()
+r.pipe(w)
+assert.ok(passed)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-flow-after-unpipe.js b/test/parallel/test-stream-pipe-flow-after-unpipe.js
index a87cc937a7..d598f48214 100644
--- a/test/parallel/test-stream-pipe-flow-after-unpipe.js
+++ b/test/parallel/test-stream-pipe-flow-after-unpipe.js
@@ -1,54 +1,45 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable; // Tests that calling .unpipe() un-blocks a stream that is paused because
+const { Readable, Writable } = require('../../lib/ours/index') // Tests that calling .unpipe() un-blocks a stream that is paused because
// it is waiting on the writable side to finish a write().
-
-var rs = new Readable({
+const rs = new Readable({
highWaterMark: 1,
// That this gets called at least 20 times is the real test here.
- read: common.mustCallAtLeast(function () {
- return rs.push('foo');
- }, 20)
-});
-var ws = new Writable({
+ read: common.mustCallAtLeast(() => rs.push('foo'), 20)
+})
+const ws = new Writable({
highWaterMark: 1,
- write: common.mustCall(function () {
+ write: common.mustCall(() => {
// Ignore the callback, this write() simply never finishes.
- setImmediate(function () {
- return rs.unpipe(ws);
- });
+ setImmediate(() => rs.unpipe(ws))
})
-});
-var chunks = 0;
-rs.on('data', common.mustCallAtLeast(function () {
- chunks++;
- if (chunks >= 20) rs.pause(); // Finish this test.
-}));
-rs.pipe(ws);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+let chunks = 0
+rs.on(
+ 'data',
+ common.mustCallAtLeast(() => {
+ chunks++
+ if (chunks >= 20) rs.pause() // Finish this test.
+ })
+)
+rs.pipe(ws)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js
index f1ba58e46d..b38571a712 100644
--- a/test/parallel/test-stream-pipe-flow.js
+++ b/test/parallel/test-stream-pipe-flow.js
@@ -1,95 +1,113 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable,
- PassThrough = _require.PassThrough;
+const { Readable, Writable, PassThrough } = require('../../lib/ours/index')
{
- var ticks = 17;
- var rs = new Readable({
+ let ticks = 17
+ const rs = new Readable({
objectMode: true,
- read: function read() {
- if (ticks-- > 0) return process.nextTick(function () {
- return rs.push({});
- });
- rs.push({});
- rs.push(null);
+ read: () => {
+ if (ticks-- > 0) return process.nextTick(() => rs.push({}))
+ rs.push({})
+ rs.push(null)
}
- });
- var ws = new Writable({
+ })
+ const ws = new Writable({
highWaterMark: 0,
objectMode: true,
- write: function write(data, end, cb) {
- return setImmediate(cb);
- }
- });
- rs.on('end', common.mustCall());
- ws.on('finish', common.mustCall());
- rs.pipe(ws);
+ write: (data, end, cb) => setImmediate(cb)
+ })
+ rs.on('end', common.mustCall())
+ ws.on('finish', common.mustCall())
+ rs.pipe(ws)
}
{
- var missing = 8;
-
- var _rs = new Readable({
+ let missing = 8
+ const rs = new Readable({
objectMode: true,
- read: function read() {
- if (missing--) _rs.push({});else _rs.push(null);
+ read: () => {
+ if (missing--) rs.push({})
+ else rs.push(null)
}
- });
-
- var pt = _rs.pipe(new PassThrough({
- objectMode: true,
- highWaterMark: 2
- })).pipe(new PassThrough({
- objectMode: true,
- highWaterMark: 2
- }));
-
- pt.on('end', function () {
- wrapper.push(null);
- });
- var wrapper = new Readable({
+ })
+ const pt = rs
+ .pipe(
+ new PassThrough({
+ objectMode: true,
+ highWaterMark: 2
+ })
+ )
+ .pipe(
+ new PassThrough({
+ objectMode: true,
+ highWaterMark: 2
+ })
+ )
+ pt.on('end', () => {
+ wrapper.push(null)
+ })
+ const wrapper = new Readable({
objectMode: true,
- read: function read() {
- process.nextTick(function () {
- var data = pt.read();
+ read: () => {
+ process.nextTick(() => {
+ let data = pt.read()
if (data === null) {
- pt.once('readable', function () {
- data = pt.read();
- if (data !== null) wrapper.push(data);
- });
+ pt.once('readable', () => {
+ data = pt.read()
+ if (data !== null) wrapper.push(data)
+ })
} else {
- wrapper.push(data);
+ wrapper.push(data)
}
- });
+ })
}
- });
- wrapper.resume();
- wrapper.on('end', common.mustCall());
+ })
+ wrapper.resume()
+ wrapper.on('end', common.mustCall())
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ // Only register drain if there is backpressure.
+ const rs = new Readable({
+ read() {}
+ })
+ const pt = rs.pipe(
+ new PassThrough({
+ objectMode: true,
+ highWaterMark: 2
+ })
+ )
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ pt.on('finish', () => {
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ })
+ rs.push('asd')
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ process.nextTick(() => {
+ rs.push('asd')
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ rs.push(null)
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ })
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js
index 1ac02b0780..c95c9d98d1 100644
--- a/test/parallel/test-stream-pipe-manual-resume.js
+++ b/test/parallel/test-stream-pipe-manual-resume.js
@@ -1,62 +1,51 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
function test(throwCodeInbetween) {
// Check that a pipe does not stall if .read() is called unexpectedly
// (i.e. the stream is not resumed by the pipe).
- var n = 1000;
- var counter = n;
- var rs = stream.Readable({
+ const n = 1000
+ let counter = n
+ const rs = stream.Readable({
objectMode: true,
- read: common.mustCallAtLeast(function () {
- if (--counter >= 0) rs.push({
- counter: counter
- });else rs.push(null);
+ read: common.mustCallAtLeast(() => {
+ if (--counter >= 0)
+ rs.push({
+ counter
+ })
+ else rs.push(null)
}, n)
- });
- var ws = stream.Writable({
+ })
+ const ws = stream.Writable({
objectMode: true,
- write: common.mustCall(function (data, enc, cb) {
- setImmediate(cb);
+ write: common.mustCall((data, enc, cb) => {
+ setImmediate(cb)
}, n)
- });
- setImmediate(function () {
- return throwCodeInbetween(rs, ws);
- });
- rs.pipe(ws);
+ })
+ setImmediate(() => throwCodeInbetween(rs, ws))
+ rs.pipe(ws)
}
-test(function (rs) {
- return rs.read();
-});
-test(function (rs) {
- return rs.resume();
-});
-test(function () {
- return 0;
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+test((rs) => rs.read())
+test((rs) => rs.resume())
+test(() => 0)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js
index 011923a043..2163ea8f38 100644
--- a/test/parallel/test-stream-pipe-multiple-pipes.js
+++ b/test/parallel/test-stream-pipe-multiple-pipes.js
@@ -1,113 +1,69 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
-var readable = new stream.Readable({
- read: function read() {}
-});
-var writables = [];
+const readable = new stream.Readable({
+ read: () => {}
+})
+const writables = []
-var _loop = function _loop(i) {
- var target = new stream.Writable({
- write: common.mustCall(function (chunk, encoding, callback) {
- target.output.push(chunk);
- callback();
+for (let i = 0; i < 5; i++) {
+ const target = new stream.Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ target.output.push(chunk)
+ callback()
}, 1)
- });
- target.output = [];
- target.on('pipe', common.mustCall());
- readable.pipe(target);
- writables.push(target);
-};
-
-for (var i = 0; i < 5; i++) {
- _loop(i);
+ })
+ target.output = []
+ target.on('pipe', common.mustCall())
+ readable.pipe(target)
+ writables.push(target)
}
-var input = bufferShim.from([1, 2, 3, 4, 5]);
-readable.push(input); // The pipe() calls will postpone emission of the 'resume' event using nextTick,
+const input = Buffer.from([1, 2, 3, 4, 5])
+readable.push(input) // The pipe() calls will postpone emission of the 'resume' event using nextTick,
// so no data will be available to the writable streams until then.
-process.nextTick(common.mustCall(function () {
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
- var _iteratorError = undefined;
-
- try {
- for (var _iterator = writables[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
- var target = _step.value;
- assert.deepStrictEqual(target.output, [input]);
- target.on('unpipe', common.mustCall());
- readable.unpipe(target);
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
+process.nextTick(
+ common.mustCall(() => {
+ for (const target of writables) {
+ assert.deepStrictEqual(target.output, [input])
+ target.on('unpipe', common.mustCall())
+ readable.unpipe(target)
}
- }
-
- readable.push('something else'); // This does not get through.
-
- readable.push(null);
- readable.resume(); // Make sure the 'end' event gets emitted.
-}));
-readable.on('end', common.mustCall(function () {
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
- var _iteratorError2 = undefined;
- try {
- for (var _iterator2 = writables[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
- var target = _step2.value;
- assert.deepStrictEqual(target.output, [input]);
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
+ readable.push('something else') // This does not get through.
+
+ readable.push(null)
+ readable.resume() // Make sure the 'end' event gets emitted.
+ })
+)
+readable.on(
+ 'end',
+ common.mustCall(() => {
+ for (const target of writables) {
+ assert.deepStrictEqual(target.output, [input])
}
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
}
-}));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-needDrain.js b/test/parallel/test-stream-pipe-needDrain.js
new file mode 100644
index 0000000000..09f51bba73
--- /dev/null
+++ b/test/parallel/test-stream-pipe-needDrain.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable, Writable } = require('../../lib/ours/index') // Pipe should pause temporarily if writable needs drain.
+
+{
+ const w = new Writable({
+ write(buf, encoding, callback) {
+ process.nextTick(callback)
+ },
+
+ highWaterMark: 1
+ })
+
+ while (w.write('asd'));
+
+ assert.strictEqual(w.writableNeedDrain, true)
+ const r = new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ })
+ r.on('pause', common.mustCall(2))
+ r.on('end', common.mustCall())
+ r.pipe(w)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js
index cbe6f66ad3..9d69e27011 100644
--- a/test/parallel/test-stream-pipe-same-destination-twice.js
+++ b/test/parallel/test-stream-pipe-same-destination-twice.js
@@ -1,102 +1,81 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common'); // Regression test for https://github.com/nodejs/node/issues/12718.
+ error() {}
+}
+const common = require('../common') // Regression test for https://github.com/nodejs/node/issues/12718.
// Tests that piping a source stream twice to the same destination stream
// works, and that a subsequent unpipe() call only removes the pipe *once*.
+const assert = require('assert')
-var assert = require('assert/');
-
-var _require = require('../../'),
- PassThrough = _require.PassThrough,
- Writable = _require.Writable;
+const { PassThrough, Writable } = require('../../lib/ours/index')
{
- var passThrough = new PassThrough();
- var dest = new Writable({
- write: common.mustCall(function (chunk, encoding, cb) {
- assert.strictEqual("".concat(chunk), 'foobar');
- cb();
+ const passThrough = new PassThrough()
+ const dest = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(`${chunk}`, 'foobar')
+ cb()
})
- });
- passThrough.pipe(dest);
- passThrough.pipe(dest);
- assert.strictEqual(passThrough._events.data.length, 2);
- assert.strictEqual(passThrough._readableState.pipesCount, 2);
- assert.strictEqual(passThrough._readableState.pipes[0], dest);
- assert.strictEqual(passThrough._readableState.pipes[1], dest);
- passThrough.unpipe(dest);
- assert.strictEqual(passThrough._events.data.length, 1);
- assert.strictEqual(passThrough._readableState.pipesCount, 1);
- assert.strictEqual(passThrough._readableState.pipes, dest);
- passThrough.write('foobar');
- passThrough.pipe(dest);
+ })
+ passThrough.pipe(dest)
+ passThrough.pipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes[0], dest)
+ assert.strictEqual(passThrough._readableState.pipes[1], dest)
+ passThrough.unpipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 1)
+ assert.strictEqual(passThrough._readableState.pipes.length, 1)
+ assert.deepStrictEqual(passThrough._readableState.pipes, [dest])
+ passThrough.write('foobar')
+ passThrough.pipe(dest)
}
{
- var _passThrough = new PassThrough();
-
- var _dest = new Writable({
- write: common.mustCall(function (chunk, encoding, cb) {
- assert.strictEqual("".concat(chunk), 'foobar');
- cb();
+ const passThrough = new PassThrough()
+ const dest = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(`${chunk}`, 'foobar')
+ cb()
}, 2)
- });
-
- _passThrough.pipe(_dest);
-
- _passThrough.pipe(_dest);
-
- assert.strictEqual(_passThrough._events.data.length, 2);
- assert.strictEqual(_passThrough._readableState.pipesCount, 2);
- assert.strictEqual(_passThrough._readableState.pipes[0], _dest);
- assert.strictEqual(_passThrough._readableState.pipes[1], _dest);
-
- _passThrough.write('foobar');
+ })
+ passThrough.pipe(dest)
+ passThrough.pipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes[0], dest)
+ assert.strictEqual(passThrough._readableState.pipes[1], dest)
+ passThrough.write('foobar')
}
{
- var _passThrough2 = new PassThrough();
-
- var _dest2 = new Writable({
+ const passThrough = new PassThrough()
+ const dest = new Writable({
write: common.mustNotCall()
- });
-
- _passThrough2.pipe(_dest2);
-
- _passThrough2.pipe(_dest2);
-
- assert.strictEqual(_passThrough2._events.data.length, 2);
- assert.strictEqual(_passThrough2._readableState.pipesCount, 2);
- assert.strictEqual(_passThrough2._readableState.pipes[0], _dest2);
- assert.strictEqual(_passThrough2._readableState.pipes[1], _dest2);
-
- _passThrough2.unpipe(_dest2);
-
- _passThrough2.unpipe(_dest2);
-
- assert.strictEqual(_passThrough2._events.data, undefined);
- assert.strictEqual(_passThrough2._readableState.pipesCount, 0);
-
- _passThrough2.write('foobar');
+ })
+ passThrough.pipe(dest)
+ passThrough.pipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes[0], dest)
+ assert.strictEqual(passThrough._readableState.pipes[1], dest)
+ passThrough.unpipe(dest)
+ passThrough.unpipe(dest)
+ assert.strictEqual(passThrough._events.data, undefined)
+ assert.strictEqual(passThrough._readableState.pipes.length, 0)
+ passThrough.write('foobar')
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js
index 163212c93f..759bacf50e 100644
--- a/test/parallel/test-stream-pipe-unpipe-streams.js
+++ b/test/parallel/test-stream-pipe-unpipe-streams.js
@@ -1,103 +1,113 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
-
-var assert = require('assert/');
-
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable;
-
-var source = Readable({
- read: function read() {}
-});
-var dest1 = Writable({
- write: function write() {}
-});
-var dest2 = Writable({
- write: function write() {}
-});
-source.pipe(dest1);
-source.pipe(dest2);
-dest1.on('unpipe', common.mustCall());
-dest2.on('unpipe', common.mustCall());
-assert.strictEqual(source._readableState.pipes[0], dest1);
-assert.strictEqual(source._readableState.pipes[1], dest2);
-assert.strictEqual(source._readableState.pipes.length, 2); // Should be able to unpipe them in the reverse order that they were piped.
-
-source.unpipe(dest2);
-assert.strictEqual(source._readableState.pipes, dest1);
-assert.notStrictEqual(source._readableState.pipes, dest2);
-dest2.on('unpipe', common.mustNotCall());
-source.unpipe(dest2);
-source.unpipe(dest1);
-assert.strictEqual(source._readableState.pipes, null);
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable, Writable } = require('../../lib/ours/index')
+
+const source = Readable({
+ read: () => {}
+})
+const dest1 = Writable({
+ write: () => {}
+})
+const dest2 = Writable({
+ write: () => {}
+})
+source.pipe(dest1)
+source.pipe(dest2)
+dest1.on('unpipe', common.mustCall())
+dest2.on('unpipe', common.mustCall())
+assert.strictEqual(source._readableState.pipes[0], dest1)
+assert.strictEqual(source._readableState.pipes[1], dest2)
+assert.strictEqual(source._readableState.pipes.length, 2) // Should be able to unpipe them in the reverse order that they were piped.
+
+source.unpipe(dest2)
+assert.deepStrictEqual(source._readableState.pipes, [dest1])
+assert.notStrictEqual(source._readableState.pipes, dest2)
+dest2.on('unpipe', common.mustNotCall())
+source.unpipe(dest2)
+source.unpipe(dest1)
+assert.strictEqual(source._readableState.pipes.length, 0)
{
- // test `cleanup()` if we unpipe all streams.
- var _source = Readable({
- read: function read() {}
- });
-
- var _dest = Writable({
- write: function write() {}
- });
-
- var _dest2 = Writable({
- write: function write() {}
- });
-
- var destCount = 0;
- var srcCheckEventNames = ['end', 'data'];
- var destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe'];
- var checkSrcCleanup = common.mustCall(function () {
- assert.strictEqual(_source._readableState.pipes, null);
- assert.strictEqual(_source._readableState.pipesCount, 0);
- assert.strictEqual(_source._readableState.flowing, false);
- srcCheckEventNames.forEach(function (eventName) {
- assert.strictEqual(_source.listenerCount(eventName), 0, "source's '".concat(eventName, "' event listeners not removed"));
- });
- });
+ // Test `cleanup()` if we unpipe all streams.
+ const source = Readable({
+ read: () => {}
+ })
+ const dest1 = Writable({
+ write: () => {}
+ })
+ const dest2 = Writable({
+ write: () => {}
+ })
+ let destCount = 0
+ const srcCheckEventNames = ['end', 'data']
+ const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe']
+ const checkSrcCleanup = common.mustCall(() => {
+ assert.strictEqual(source._readableState.pipes.length, 0)
+ assert.strictEqual(source._readableState.flowing, false)
+ srcCheckEventNames.forEach((eventName) => {
+ assert.strictEqual(source.listenerCount(eventName), 0, `source's '${eventName}' event listeners not removed`)
+ })
+ })
function checkDestCleanup(dest) {
- var currentDestId = ++destCount;
-
- _source.pipe(dest);
-
- var unpipeChecker = common.mustCall(function () {
- assert.deepStrictEqual(dest.listeners('unpipe'), [unpipeChecker], "destination{".concat(currentDestId, "} should have a 'unpipe' event ") + 'listener which is `unpipeChecker`');
- dest.removeListener('unpipe', unpipeChecker);
- destCheckEventNames.forEach(function (eventName) {
- assert.strictEqual(dest.listenerCount(eventName), 0, "destination{".concat(currentDestId, "}'s '").concat(eventName, "' event ") + 'listeners not removed');
- });
- if (--destCount === 0) checkSrcCleanup();
- });
- dest.on('unpipe', unpipeChecker);
+ const currentDestId = ++destCount
+ source.pipe(dest)
+ const unpipeChecker = common.mustCall(() => {
+ assert.deepStrictEqual(
+ dest.listeners('unpipe'),
+ [unpipeChecker],
+ `destination{${currentDestId}} should have a 'unpipe' event ` + 'listener which is `unpipeChecker`'
+ )
+ dest.removeListener('unpipe', unpipeChecker)
+ destCheckEventNames.forEach((eventName) => {
+ assert.strictEqual(
+ dest.listenerCount(eventName),
+ 0,
+ `destination{${currentDestId}}'s '${eventName}' event ` + 'listeners not removed'
+ )
+ })
+ if (--destCount === 0) checkSrcCleanup()
+ })
+ dest.on('unpipe', unpipeChecker)
}
- checkDestCleanup(_dest);
- checkDestCleanup(_dest2);
-
- _source.unpipe();
+ checkDestCleanup(dest1)
+ checkDestCleanup(dest2)
+ source.unpipe()
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ const src = Readable({
+ read: () => {}
+ })
+ const dst = Writable({
+ write: () => {}
+ })
+ src.pipe(dst)
+ src.on(
+ 'resume',
+ common.mustCall(() => {
+ src.on('pause', common.mustCall())
+ src.unpipe(dst)
+ })
+ )
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js
index 041218adb7..2db82dcb27 100644
--- a/test/parallel/test-stream-pipe-without-listenerCount.js
+++ b/test/parallel/test-stream-pipe-without-listenerCount.js
@@ -1,39 +1,34 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var r = new stream.Stream();
-r.listenerCount = undefined;
-var w = new stream.Stream();
-w.listenerCount = undefined;
+const r = new stream.Stream()
+r.listenerCount = undefined
+const w = new stream.Stream()
+w.listenerCount = undefined
w.on('pipe', function () {
- r.emit('error', new Error('Readable Error'));
- w.emit('error', new Error('Writable Error'));
-});
-r.on('error', common.mustCall());
-w.on('error', common.mustCall());
-r.pipe(w);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ r.emit('error', new Error('Readable Error'))
+ w.emit('error', new Error('Writable Error'))
+})
+r.on('error', common.mustCall())
+w.on('error', common.mustCall())
+r.pipe(w)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-async-iterator.js b/test/parallel/test-stream-pipeline-async-iterator.js
new file mode 100644
index 0000000000..e45e01e658
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-async-iterator.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable, PassThrough, pipeline } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+const _err = new Error('kaboom')
+
+async function run() {
+ const source = new Readable({
+ read() {}
+ })
+ source.push('hello')
+ source.push('world')
+ setImmediate(() => {
+ source.destroy(_err)
+ })
+ const iterator = pipeline(source, new PassThrough(), () => {})
+ iterator.setEncoding('utf8')
+
+ for await (const k of iterator) {
+ assert.strictEqual(k, 'helloworld')
+ }
+}
+
+run().catch(common.mustCall((err) => assert.strictEqual(err, _err)))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-http2.js b/test/parallel/test-stream-pipeline-http2.js
new file mode 100644
index 0000000000..e494dde27c
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-http2.js
@@ -0,0 +1,57 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+if (!common.hasCrypto) common.skip('missing crypto')
+
+const { Readable, pipeline } = require('../../lib/ours/index')
+
+const http2 = require('http2')
+
+{
+ const server = http2.createServer((req, res) => {
+ pipeline(req, res, common.mustCall())
+ })
+ server.listen(0, () => {
+ const url = `http://localhost:${server.address().port}`
+ const client = http2.connect(url)
+ const req = client.request({
+ ':method': 'POST'
+ })
+ const rs = new Readable({
+ read() {
+ rs.push('hello')
+ }
+ })
+ pipeline(
+ rs,
+ req,
+ common.mustCall((err) => {
+ server.close()
+ client.close()
+ })
+ )
+ let cnt = 10
+ req.on('data', (data) => {
+ cnt--
+ if (cnt === 0) rs.destroy()
+ })
+ })
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-listeners.js b/test/parallel/test-stream-pipeline-listeners.js
new file mode 100644
index 0000000000..214d8f09ba
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-listeners.js
@@ -0,0 +1,103 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { pipeline, Duplex, PassThrough, Writable } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'no way')
+ }, 2)
+) // Ensure that listeners are removed if the last stream is readable
+// And other stream's listeners unchanged
+
+const a = new PassThrough()
+a.end('foobar')
+const b = new Duplex({
+ write(chunk, encoding, callback) {
+ callback()
+ }
+})
+pipeline(
+ a,
+ b,
+ common.mustCall((error) => {
+ if (error) {
+ assert.ifError(error)
+ }
+
+ assert(a.listenerCount('error') > 0)
+ assert.strictEqual(b.listenerCount('error'), 0)
+ setTimeout(() => {
+ assert.strictEqual(b.listenerCount('error'), 0)
+ b.destroy(new Error('no way'))
+ }, 100)
+ })
+) // Async generators
+
+const c = new PassThrough()
+c.end('foobar')
+const d = pipeline(
+ c,
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ common.mustCall((error) => {
+ if (error) {
+ assert.ifError(error)
+ }
+
+ assert(c.listenerCount('error') > 0)
+ assert.strictEqual(d.listenerCount('error'), 0)
+ setTimeout(() => {
+ assert.strictEqual(b.listenerCount('error'), 0)
+ d.destroy(new Error('no way'))
+ }, 100)
+ })
+) // If last stream is not readable, will not throw and remove listeners
+
+const e = new PassThrough()
+e.end('foobar')
+const f = new Writable({
+ write(chunk, encoding, callback) {
+ callback()
+ }
+})
+pipeline(
+ e,
+ f,
+ common.mustCall((error) => {
+ if (error) {
+ assert.ifError(error)
+ }
+
+ assert(e.listenerCount('error') > 0)
+ assert(f.listenerCount('error') > 0)
+ setTimeout(() => {
+ assert(f.listenerCount('error') > 0)
+ f.destroy(new Error('no way'))
+ }, 100)
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-process.js b/test/parallel/test-stream-pipeline-process.js
new file mode 100644
index 0000000000..f00759c9d4
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-process.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const os = require('os')
+
+if (process.argv[2] === 'child') {
+ const { pipeline } = require('../../lib/ours/index')
+
+ pipeline(process.stdin, process.stdout, common.mustSucceed())
+} else {
+ const cp = require('child_process')
+
+ cp.exec(
+ ['echo', 'hello', '|', `"${process.execPath}"`, `"${__filename}"`, 'child'].join(' '),
+ common.mustSucceed((stdout) => {
+ assert.strictEqual(stdout.split(os.EOL).shift().trim(), 'hello')
+ })
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js
index ea252acd07..dcb7d99b8f 100644
--- a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js
+++ b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js
@@ -1,61 +1,59 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable,
- Duplex = _require.Duplex,
- pipeline = _require.pipeline; // Test that the callback for pipeline() is called even when the ._destroy()
+const { Readable, Duplex, pipeline } = require('../../lib/ours/index') // Test that the callback for pipeline() is called even when the ._destroy()
// method of the stream places an .end() request to itself that does not
// get processed before the destruction of the stream (i.e. the 'close' event).
// Refs: https://github.com/nodejs/node/issues/24456
-
-var readable = new Readable({
- read: common.mustCall(function () {})
-});
-var duplex = new Duplex({
- write: function write(chunk, enc, cb) {// Simulate messages queueing up.
+const readable = new Readable({
+ read: common.mustCall(() => {})
+})
+const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ // Simulate messages queueing up.
},
- read: function read() {},
- destroy: function destroy(err, cb) {
+
+ read() {},
+
+ destroy(err, cb) {
// Call end() from inside the destroy() method, like HTTP/2 streams
// do at the time of writing.
- this.end();
- cb(err);
+ this.end()
+ cb(err)
}
-});
-duplex.on('finished', common.mustNotCall());
-pipeline(readable, duplex, common.mustCall(function (err) {
- assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE');
-})); // Write one chunk of data, and destroy the stream later.
+})
+duplex.on('finished', common.mustNotCall())
+pipeline(
+ readable,
+ duplex,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+) // Write one chunk of data, and destroy the stream later.
// That should trigger the pipeline destruction.
-readable.push('foo');
-setImmediate(function () {
- readable.destroy();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+readable.push('foo')
+setImmediate(() => {
+ readable.destroy()
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-uncaught.js b/test/parallel/test-stream-pipeline-uncaught.js
new file mode 100644
index 0000000000..306b7d7bf5
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-uncaught.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { pipeline, PassThrough } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'error')
+ })
+) // Ensure that pipeline that ends with Promise
+// still propagates error to uncaughtException.
+
+const s = new PassThrough()
+s.end('data')
+pipeline(
+ s,
+ async function (source) {
+ for await (const chunk of source) {
+ } // eslint-disable-line no-unused-vars, no-empty
+ },
+ common.mustSucceed(() => {
+ throw new Error('error')
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-with-empty-string.js b/test/parallel/test-stream-pipeline-with-empty-string.js
new file mode 100644
index 0000000000..4663e9d7c2
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-with-empty-string.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { pipeline, PassThrough } = require('../../lib/ours/index')
+
+async function runTest() {
+ await pipeline(
+ '',
+ new PassThrough({
+ objectMode: true
+ }),
+ common.mustCall(() => {})
+ )
+}
+
+runTest().then(common.mustCall(() => {}))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js
deleted file mode 100644
index 686f57027f..0000000000
--- a/test/parallel/test-stream-pipeline.js
+++ /dev/null
@@ -1,483 +0,0 @@
-"use strict";
-
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
-
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var _require = require('../../'),
- Stream = _require.Stream,
- Writable = _require.Writable,
- Readable = _require.Readable,
- Transform = _require.Transform,
- pipeline = _require.pipeline;
-
-var assert = require('assert/');
-
-var http = require('http');
-
-var promisify = require('util-promisify');
-
-{
- var finished = false;
- var processed = [];
- var expected = [bufferShim.from('a'), bufferShim.from('b'), bufferShim.from('c')];
- var read = new Readable({
- read: function read() {}
- });
- var write = new Writable({
- write: function write(data, enc, cb) {
- processed.push(data);
- cb();
- }
- });
- write.on('finish', function () {
- finished = true;
- });
-
- for (var i = 0; i < expected.length; i++) {
- read.push(expected[i]);
- }
-
- read.push(null);
- pipeline(read, write, common.mustCall(function (err) {
- assert.ok(!err, 'no error');
- assert.ok(finished);
- assert.deepStrictEqual(processed, expected);
- }));
-}
-{
- var _read = new Readable({
- read: function read() {}
- });
-
- assert.throws(function () {
- pipeline(_read, function () {});
- }, /ERR_MISSING_ARGS/);
- assert.throws(function () {
- pipeline(function () {});
- }, /ERR_MISSING_ARGS/);
- assert.throws(function () {
- pipeline();
- }, /ERR_MISSING_ARGS/);
-}
-{
- var _read2 = new Readable({
- read: function read() {}
- });
-
- var _write = new Writable({
- write: function write(data, enc, cb) {
- cb();
- }
- });
-
- _read2.push('data');
-
- setImmediate(function () {
- return _read2.destroy();
- });
- pipeline(_read2, _write, common.mustCall(function (err) {
- assert.ok(err, 'should have an error');
- }));
-}
-{
- var _read3 = new Readable({
- read: function read() {}
- });
-
- var _write2 = new Writable({
- write: function write(data, enc, cb) {
- cb();
- }
- });
-
- _read3.push('data');
-
- setImmediate(function () {
- return _read3.destroy(new Error('kaboom'));
- });
- var dst = pipeline(_read3, _write2, common.mustCall(function (err) {
- assert.strictEqual(err.message, 'kaboom');
- }));
- assert.strictEqual(dst, _write2);
-}
-{
- var _read4 = new Readable({
- read: function read() {}
- });
-
- var transform = new Transform({
- transform: function transform(data, enc, cb) {
- process.nextTick(cb, new Error('kaboom'));
- }
- });
-
- var _write3 = new Writable({
- write: function write(data, enc, cb) {
- cb();
- }
- });
-
- _read4.on('close', common.mustCall());
-
- transform.on('close', common.mustCall());
-
- _write3.on('close', common.mustCall());
-
- var _dst = pipeline(_read4, transform, _write3, common.mustCall(function (err) {
- assert.strictEqual(err.message, 'kaboom');
- }));
-
- assert.strictEqual(_dst, _write3);
-
- _read4.push('hello');
-}
-{
- var server = http.createServer(function (req, res) {
- var rs = new Readable({
- read: function read() {
- rs.push('hello');
- rs.push(null);
- }
- });
- pipeline(rs, res, function () {});
- });
- server.listen(0, function () {
- var req = http.request({
- port: server.address().port
- });
- req.end();
- req.on('response', function (res) {
- var buf = [];
- res.on('data', function (data) {
- return buf.push(data);
- });
- res.on('end', common.mustCall(function () {
- assert.deepStrictEqual(Buffer.concat(buf), bufferShim.from('hello'));
- server.close();
- }));
- });
- });
-}
-{
- var _server = http.createServer(function (req, res) {
- var sent = false;
- var rs = new Readable({
- read: function read() {
- if (sent) {
- return;
- }
-
- sent = true;
- rs.push('hello');
- },
- destroy: common.mustCall(function (err, cb) {
- // prevents fd leaks by destroying http pipelines
- cb();
- })
- });
- pipeline(rs, res, function () {});
- });
-
- _server.listen(0, function () {
- var req = http.request({
- port: _server.address().port
- });
- req.end();
- req.on('response', function (res) {
- setImmediate(function () {
- res.destroy();
-
- _server.close();
- });
- });
- });
-}
-{
- var _server2 = http.createServer(function (req, res) {
- var sent = 0;
- var rs = new Readable({
- read: function read() {
- if (sent++ > 10) {
- return;
- }
-
- rs.push('hello');
- },
- destroy: common.mustCall(function (err, cb) {
- cb();
- })
- });
- pipeline(rs, res, function () {});
- });
-
- var cnt = 10;
- var badSink = new Writable({
- write: function write(data, enc, cb) {
- cnt--;
- if (cnt === 0) process.nextTick(cb, new Error('kaboom'));else cb();
- }
- });
-
- _server2.listen(0, function () {
- var req = http.request({
- port: _server2.address().port
- });
- req.end();
- req.on('response', function (res) {
- pipeline(res, badSink, common.mustCall(function (err) {
- assert.strictEqual(err.message, 'kaboom');
-
- _server2.close();
- }));
- });
- });
-}
-{
- var _server3 = http.createServer(function (req, res) {
- pipeline(req, res, common.mustCall());
- });
-
- _server3.listen(0, function () {
- var req = http.request({
- port: _server3.address().port
- });
- var sent = 0;
- var rs = new Readable({
- read: function read() {
- if (sent++ > 10) {
- return;
- }
-
- rs.push('hello');
- }
- });
- pipeline(rs, req, common.mustCall(function () {
- _server3.close();
- }));
- req.on('response', function (res) {
- var cnt = 10;
- res.on('data', function () {
- cnt--;
- if (cnt === 0) rs.destroy();
- });
- });
- });
-}
-{
- var makeTransform = function makeTransform() {
- var tr = new Transform({
- transform: function transform(data, enc, cb) {
- cb(null, data);
- }
- });
- tr.on('close', common.mustCall());
- return tr;
- };
-
- var rs = new Readable({
- read: function read() {
- rs.push('hello');
- }
- });
- var _cnt = 10;
- var ws = new Writable({
- write: function write(data, enc, cb) {
- _cnt--;
- if (_cnt === 0) return process.nextTick(cb, new Error('kaboom'));
- cb();
- }
- });
- rs.on('close', common.mustCall());
- ws.on('close', common.mustCall());
- pipeline(rs, makeTransform(), makeTransform(), makeTransform(), makeTransform(), makeTransform(), makeTransform(), ws, common.mustCall(function (err) {
- assert.strictEqual(err.message, 'kaboom');
- }));
-}
-{
- var oldStream = new Stream();
-
- oldStream.pause = oldStream.resume = function () {};
-
- oldStream.write = function (data) {
- oldStream.emit('data', data);
- return true;
- };
-
- oldStream.end = function () {
- oldStream.emit('end');
- };
-
- var _expected = [bufferShim.from('hello'), bufferShim.from('world')];
-
- var _rs = new Readable({
- read: function read() {
- for (var _i = 0; _i < _expected.length; _i++) {
- _rs.push(_expected[_i]);
- }
-
- _rs.push(null);
- }
- });
-
- var _ws = new Writable({
- write: function write(data, enc, cb) {
- assert.deepStrictEqual(data, _expected.shift());
- cb();
- }
- });
-
- var _finished = false;
-
- _ws.on('finish', function () {
- _finished = true;
- });
-
- pipeline(_rs, oldStream, _ws, common.mustCall(function (err) {
- assert(!err, 'no error');
- assert(_finished, 'last stream finished');
- }));
-}
-{
- var _oldStream = new Stream();
-
- _oldStream.pause = _oldStream.resume = function () {};
-
- _oldStream.write = function (data) {
- _oldStream.emit('data', data);
-
- return true;
- };
-
- _oldStream.end = function () {
- _oldStream.emit('end');
- };
-
- var destroyableOldStream = new Stream();
-
- destroyableOldStream.pause = destroyableOldStream.resume = function () {};
-
- destroyableOldStream.destroy = common.mustCall(function () {
- destroyableOldStream.emit('close');
- });
-
- destroyableOldStream.write = function (data) {
- destroyableOldStream.emit('data', data);
- return true;
- };
-
- destroyableOldStream.end = function () {
- destroyableOldStream.emit('end');
- };
-
- var _rs2 = new Readable({
- read: function read() {
- _rs2.destroy(new Error('stop'));
- }
- });
-
- var _ws2 = new Writable({
- write: function write(data, enc, cb) {
- cb();
- }
- });
-
- var _finished2 = false;
-
- _ws2.on('finish', function () {
- _finished2 = true;
- });
-
- pipeline(_rs2, _oldStream, destroyableOldStream, _ws2, common.mustCall(function (err) {
- assert.deepStrictEqual(err, new Error('stop'));
- assert(!_finished2, 'should not finish');
- }));
-}
-{
- var pipelinePromise = promisify(pipeline);
-
- function run() {
- return _run.apply(this, arguments);
- }
-
- function _run() {
- _run = _asyncToGenerator(function* () {
- var read = new Readable({
- read: function read() {}
- });
- var write = new Writable({
- write: function write(data, enc, cb) {
- cb();
- }
- });
- read.push('data');
- read.push(null);
- var finished = false;
- write.on('finish', function () {
- finished = true;
- });
- yield pipelinePromise(read, write);
- assert(finished);
- });
- return _run.apply(this, arguments);
- }
-
- run();
-}
-{
- var _read5 = new Readable({
- read: function read() {}
- });
-
- var _transform = new Transform({
- transform: function transform(data, enc, cb) {
- process.nextTick(cb, new Error('kaboom'));
- }
- });
-
- var _write4 = new Writable({
- write: function write(data, enc, cb) {
- cb();
- }
- });
-
- _read5.on('close', common.mustCall());
-
- _transform.on('close', common.mustCall());
-
- _write4.on('close', common.mustCall());
-
- process.on('uncaughtException', common.mustCall(function (err) {
- assert.strictEqual(err.message, 'kaboom');
- }));
-
- var _dst2 = pipeline(_read5, _transform, _write4);
-
- assert.strictEqual(_dst2, _write4);
-
- _read5.push('hello');
-}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
diff --git a/test/parallel/test-stream-preprocess.js b/test/parallel/test-stream-preprocess.js
new file mode 100644
index 0000000000..0cf027e56d
--- /dev/null
+++ b/test/parallel/test-stream-preprocess.js
@@ -0,0 +1,89 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const fs = require('fs')
+
+const rl = require('readline')
+
+const fixtures = require('../common/fixtures')
+
+const BOM = '\uFEFF' // Get the data using a non-stream way to compare with the streamed data.
+
+const modelData = fixtures.readSync('file-to-read-without-bom.txt', 'utf8')
+const modelDataFirstCharacter = modelData[0] // Detect the number of forthcoming 'line' events for mustCall() 'expected' arg.
+
+const lineCount = modelData.match(/\n/g).length // Ensure both without-bom and with-bom test files are textwise equal.
+
+assert.strictEqual(fixtures.readSync('file-to-read-with-bom.txt', 'utf8'), `${BOM}${modelData}`) // An unjustified BOM stripping with a non-BOM character unshifted to a stream.
+
+const inputWithoutBOM = fs.createReadStream(fixtures.path('file-to-read-without-bom.txt'), 'utf8')
+inputWithoutBOM.once(
+ 'readable',
+ common.mustCall(() => {
+ const maybeBOM = inputWithoutBOM.read(1)
+ assert.strictEqual(maybeBOM, modelDataFirstCharacter)
+ assert.notStrictEqual(maybeBOM, BOM)
+ inputWithoutBOM.unshift(maybeBOM)
+ let streamedData = ''
+ rl.createInterface({
+ input: inputWithoutBOM
+ })
+ .on(
+ 'line',
+ common.mustCall((line) => {
+ streamedData += `${line}\n`
+ }, lineCount)
+ )
+ .on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData)
+ })
+ )
+ })
+) // A justified BOM stripping.
+
+const inputWithBOM = fs.createReadStream(fixtures.path('file-to-read-with-bom.txt'), 'utf8')
+inputWithBOM.once(
+ 'readable',
+ common.mustCall(() => {
+ const maybeBOM = inputWithBOM.read(1)
+ assert.strictEqual(maybeBOM, BOM)
+ let streamedData = ''
+ rl.createInterface({
+ input: inputWithBOM
+ })
+ .on(
+ 'line',
+ common.mustCall((line) => {
+ streamedData += `${line}\n`
+ }, lineCount)
+ )
+ .on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData)
+ })
+ )
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-promises.js b/test/parallel/test-stream-promises.js
new file mode 100644
index 0000000000..eb050f6a4d
--- /dev/null
+++ b/test/parallel/test-stream-promises.js
@@ -0,0 +1,109 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+const { Readable, Writable, promises } = stream
+
+const { finished, pipeline } = require('../../lib/stream/promises')
+
+const fs = require('fs')
+
+const assert = require('assert')
+
+const { promisify } = require('util')
+
+assert.strictEqual(promises.pipeline, pipeline)
+assert.strictEqual(promises.finished, finished)
+assert.strictEqual(pipeline, promisify(stream.pipeline))
+assert.strictEqual(finished, promisify(stream.finished)) // pipeline success
+
+{
+ let finished = false
+ const processed = []
+ const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]
+ const read = new Readable({
+ read() {}
+ })
+ const write = new Writable({
+ write(data, enc, cb) {
+ processed.push(data)
+ cb()
+ }
+ })
+ write.on('finish', () => {
+ finished = true
+ })
+
+ for (let i = 0; i < expected.length; i++) {
+ read.push(expected[i])
+ }
+
+ read.push(null)
+ pipeline(read, write).then(
+ common.mustCall((value) => {
+ assert.ok(finished)
+ assert.deepStrictEqual(processed, expected)
+ })
+ )
+} // pipeline error
+
+{
+ const read = new Readable({
+ read() {}
+ })
+ const write = new Writable({
+ write(data, enc, cb) {
+ cb()
+ }
+ })
+ read.push('data')
+ setImmediate(() => read.destroy())
+ pipeline(read, write).catch(
+ common.mustCall((err) => {
+ assert.ok(err, 'should have an error')
+ })
+ )
+} // finished success
+
+{
+ async function run() {
+ const rs = fs.createReadStream(__filename)
+ let ended = false
+ rs.resume()
+ rs.on('end', () => {
+ ended = true
+ })
+ await finished(rs)
+ assert(ended)
+ }
+
+ run().then(common.mustCall())
+} // finished error
+
+{
+ const rs = fs.createReadStream('file-does-not-exist')
+ assert
+ .rejects(finished(rs), {
+ code: 'ENOENT'
+ })
+ .then(common.mustCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js
index 4dd13be724..d500718981 100644
--- a/test/parallel/test-stream-push-order.js
+++ b/test/parallel/test-stream-push-order.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,57 +18,52 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
-var assert = require('assert/');
+const assert = require('assert')
-var s = new Readable({
+const s = new Readable({
highWaterMark: 20,
encoding: 'ascii'
-});
-var list = ['1', '2', '3', '4', '5', '6'];
+})
+const list = ['1', '2', '3', '4', '5', '6']
s._read = function (n) {
- var one = list.shift();
+ const one = list.shift()
if (!one) {
- s.push(null);
+ s.push(null)
} else {
- var two = list.shift();
- s.push(one);
- s.push(two);
+ const two = list.shift()
+ s.push(one)
+ s.push(two)
}
-};
+}
-s.read(0); // ACTUALLY [1, 3, 5, 6, 4, 2]
+s.read(0) // ACTUALLY [1, 3, 5, 6, 4, 2]
process.on('exit', function () {
- assert.deepStrictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6');
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6')
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js
index f2555d6241..bb80a8c0f5 100644
--- a/test/parallel/test-stream-push-strings.js
+++ b/test/parallel/test-stream-push-strings.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,94 +18,71 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-require('../common');
-
-var assert = require('assert/');
+const tap = require('tap')
-var Readable = require('../../').Readable;
+const silentConsole = {
+ log() {},
-var MyStream =
-/*#__PURE__*/
-function (_Readable) {
- _inherits(MyStream, _Readable);
+ error() {}
+}
+require('../common')
- function MyStream(options) {
- var _this;
+const assert = require('assert')
- _classCallCheck(this, MyStream);
+const Readable = require('../../lib/ours/index').Readable
- _this = _possibleConstructorReturn(this, _getPrototypeOf(MyStream).call(this, options));
- _this._chunks = 3;
- return _this;
+class MyStream extends Readable {
+ constructor(options) {
+ super(options)
+ this._chunks = 3
}
- _createClass(MyStream, [{
- key: "_read",
- value: function _read(n) {
- var _this2 = this;
+ _read(n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null)
- switch (this._chunks--) {
- case 0:
- return this.push(null);
+ case 1:
+ return setTimeout(() => {
+ this.push('last chunk')
+ }, 100)
- case 1:
- return setTimeout(function () {
- _this2.push('last chunk');
- }, 100);
+ case 2:
+ return this.push('second to last chunk')
- case 2:
- return this.push('second to last chunk');
+ case 3:
+ return process.nextTick(() => {
+ this.push('first chunk')
+ })
- case 3:
- return process.nextTick(function () {
- _this2.push('first chunk');
- });
-
- default:
- throw new Error('?');
- }
+ default:
+ throw new Error('?')
}
- }]);
-
- return MyStream;
-}(Readable);
+ }
+}
-var ms = new MyStream();
-var results = [];
+const ms = new MyStream()
+const results = []
ms.on('readable', function () {
- var chunk;
+ let chunk
- while (null !== (chunk = ms.read())) {
- results.push(String(chunk));
- }
-});
-var expect = ['first chunksecond to last chunk', 'last chunk'];
+ while (null !== (chunk = ms.read())) results.push(String(chunk))
+})
+const expect = ['first chunksecond to last chunk', 'last chunk']
process.on('exit', function () {
- assert.strictEqual(ms._chunks, -1);
- assert.deepStrictEqual(results, expect);
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(ms._chunks, -1)
+ assert.deepStrictEqual(results, expect)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-aborted.js b/test/parallel/test-stream-readable-aborted.js
new file mode 100644
index 0000000000..4d271d5942
--- /dev/null
+++ b/test/parallel/test-stream-readable-aborted.js
@@ -0,0 +1,81 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable, Duplex } = require('../../lib/ours/index')
+
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, true)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.push(null)
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, true)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.push('asd')
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, true)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.push('asd')
+ readable.push(null)
+ assert.strictEqual(readable.readableAborted, false)
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(readable.readableAborted, false)
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, false)
+ queueMicrotask(() => {
+ assert.strictEqual(readable.readableAborted, false)
+ })
+ })
+ )
+ readable.resume()
+}
+{
+ const duplex = new Duplex({
+ readable: false,
+
+ write() {}
+ })
+ duplex.destroy()
+ assert.strictEqual(duplex.readableAborted, false)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-add-chunk-during-data.js b/test/parallel/test-stream-readable-add-chunk-during-data.js
new file mode 100644
index 0000000000..6728287753
--- /dev/null
+++ b/test/parallel/test-stream-readable-add-chunk-during-data.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable } = require('../../lib/ours/index') // Verify that .push() and .unshift() can be called from 'data' listeners.
+
+for (const method of ['push', 'unshift']) {
+ const r = new Readable({
+ read() {}
+ })
+ r.once(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(r.readableLength, 0)
+ r[method](chunk)
+ assert.strictEqual(r.readableLength, chunk.length)
+ r.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk.toString(), 'Hello, world')
+ })
+ )
+ })
+ )
+ r.push('Hello, world')
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-async-iterators.js b/test/parallel/test-stream-readable-async-iterators.js
deleted file mode 100644
index d97cb866bc..0000000000
--- a/test/parallel/test-stream-readable-async-iterators.js
+++ /dev/null
@@ -1,816 +0,0 @@
-"use strict";
-
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
-
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
-
-function _asyncIterator(iterable) { var method; if (typeof Symbol !== "undefined") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new TypeError("Object is not async iterable"); }
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var _require = require('../../'),
- Readable = _require.Readable,
- PassThrough = _require.PassThrough,
- pipeline = _require.pipeline;
-
-var assert = require('assert/');
-
-function tests() {
- return _tests.apply(this, arguments);
-} // to avoid missing some tests if a promise does not resolve
-
-
-function _tests() {
- _tests = _asyncToGenerator(function* () {
- {
- var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
- var rs = new Readable({});
- assert.strictEqual(Object.getPrototypeOf(Object.getPrototypeOf(rs[Symbol.asyncIterator]())), AsyncIteratorPrototype);
- }
- {
- var readable = new Readable({
- objectMode: true,
- read: function read() {}
- });
- readable.push(0);
- readable.push(1);
- readable.push(null);
- var iter = readable[Symbol.asyncIterator]();
- assert.strictEqual((yield iter.next()).value, 0);
- var _iteratorNormalCompletion = true;
- var _didIteratorError = false;
-
- var _iteratorError;
-
- try {
- for (var _iterator = _asyncIterator(iter), _step, _value; _step = yield _iterator.next(), _iteratorNormalCompletion = _step.done, _value = yield _step.value, !_iteratorNormalCompletion; _iteratorNormalCompletion = true) {
- var d = _value;
- assert.strictEqual(d, 1);
- }
- } catch (err) {
- _didIteratorError = true;
- _iteratorError = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion && _iterator.return != null) {
- yield _iterator.return();
- }
- } finally {
- if (_didIteratorError) {
- throw _iteratorError;
- }
- }
- }
- }
- {
- console.log('read without for..await');
- var max = 5;
-
- var _readable = new Readable({
- objectMode: true,
- read: function read() {}
- });
-
- var _iter = _readable[Symbol.asyncIterator]();
-
- assert.strictEqual(_iter.stream, _readable);
- var values = [];
-
- for (var i = 0; i < max; i++) {
- values.push(_iter.next());
- }
-
- Promise.all(values).then(common.mustCall(function (values) {
- values.forEach(common.mustCall(function (item, i) {
- return assert.strictEqual(item.value, 'hello-' + i);
- }, 5));
- }));
-
- _readable.push('hello-0');
-
- _readable.push('hello-1');
-
- _readable.push('hello-2');
-
- _readable.push('hello-3');
-
- _readable.push('hello-4');
-
- _readable.push(null);
-
- var last = yield _iter.next();
- assert.strictEqual(last.done, true);
- }
- {
- console.log('read without for..await deferred');
-
- var _readable2 = new Readable({
- objectMode: true,
- read: function read() {}
- });
-
- var _iter2 = _readable2[Symbol.asyncIterator]();
-
- assert.strictEqual(_iter2.stream, _readable2);
- var _values = [];
-
- for (var _i = 0; _i < 3; _i++) {
- _values.push(_iter2.next());
- }
-
- _readable2.push('hello-0');
-
- _readable2.push('hello-1');
-
- _readable2.push('hello-2');
-
- var k = 0;
- var results1 = yield Promise.all(_values);
- results1.forEach(common.mustCall(function (item) {
- return assert.strictEqual(item.value, 'hello-' + k++);
- }, 3));
- _values = [];
-
- for (var _i2 = 0; _i2 < 2; _i2++) {
- _values.push(_iter2.next());
- }
-
- _readable2.push('hello-3');
-
- _readable2.push('hello-4');
-
- _readable2.push(null);
-
- var results2 = yield Promise.all(_values);
- results2.forEach(common.mustCall(function (item) {
- return assert.strictEqual(item.value, 'hello-' + k++);
- }, 2));
-
- var _last = yield _iter2.next();
-
- assert.strictEqual(_last.done, true);
- }
- {
- console.log('read without for..await with errors');
- var _max = 3;
-
- var _readable3 = new Readable({
- objectMode: true,
- read: function read() {}
- });
-
- var _iter3 = _readable3[Symbol.asyncIterator]();
-
- assert.strictEqual(_iter3.stream, _readable3);
- var _values2 = [];
- var errors = [];
-
- var _i3;
-
- for (_i3 = 0; _i3 < _max; _i3++) {
- _values2.push(_iter3.next());
- }
-
- for (_i3 = 0; _i3 < 2; _i3++) {
- errors.push(_iter3.next());
- }
-
- _readable3.push('hello-0');
-
- _readable3.push('hello-1');
-
- _readable3.push('hello-2');
-
- var resolved = yield Promise.all(_values2);
- resolved.forEach(common.mustCall(function (item, i) {
- return assert.strictEqual(item.value, 'hello-' + i);
- }, _max));
- errors.forEach(function (promise) {
- promise.catch(common.mustCall(function (err) {
- assert.strictEqual(err.message, 'kaboom');
- }));
- });
-
- _readable3.destroy(new Error('kaboom'));
- }
- {
- console.log('call next() after error');
-
- var _readable4 = new Readable({
- read: function read() {}
- });
-
- var iterator = _readable4[Symbol.asyncIterator]();
-
- var err = new Error('kaboom');
-
- _readable4.destroy(new Error('kaboom'));
-
- yield function (f, e) {
- var success = false;
- f().then(function () {
- success = true;
- throw new Error('should not succeed');
- }).catch(function (e2) {
- if (success) {
- throw e2;
- }
-
- assert.strictEqual(e.message, e2.message);
- });
- }(iterator.next.bind(iterator), err);
- }
- {
- console.log('read object mode');
- var _max2 = 42;
- var readed = 0;
- var received = 0;
-
- var _readable5 = new Readable({
- objectMode: true,
- read: function read() {
- this.push('hello');
-
- if (++readed === _max2) {
- this.push(null);
- }
- }
- });
-
- var _iteratorNormalCompletion2 = true;
- var _didIteratorError2 = false;
-
- var _iteratorError2;
-
- try {
- for (var _iterator2 = _asyncIterator(_readable5), _step2, _value2; _step2 = yield _iterator2.next(), _iteratorNormalCompletion2 = _step2.done, _value2 = yield _step2.value, !_iteratorNormalCompletion2; _iteratorNormalCompletion2 = true) {
- var _k = _value2;
- received++;
- assert.strictEqual(_k, 'hello');
- }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
- yield _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
- }
-
- assert.strictEqual(readed, received);
- }
- {
- console.log('destroy sync');
-
- var _readable6 = new Readable({
- objectMode: true,
- read: function read() {
- this.destroy(new Error('kaboom from read'));
- }
- });
-
- var _err;
-
- try {
- // eslint-disable-next-line no-unused-vars
- var _iteratorNormalCompletion3 = true;
- var _didIteratorError3 = false;
-
- var _iteratorError3;
-
- try {
- for (var _iterator3 = _asyncIterator(_readable6), _step3, _value3; _step3 = yield _iterator3.next(), _iteratorNormalCompletion3 = _step3.done, _value3 = yield _step3.value, !_iteratorNormalCompletion3; _iteratorNormalCompletion3 = true) {
- var _k2 = _value3;
- }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion3 && _iterator3.return != null) {
- yield _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
- }
- }
- }
- } catch (e) {
- _err = e;
- }
-
- assert.strictEqual(_err.message, 'kaboom from read');
- }
- {
- console.log('destroy async');
-
- var _readable7 = new Readable({
- objectMode: true,
- read: function read() {
- var _this = this;
-
- if (!this.pushed) {
- this.push('hello');
- this.pushed = true;
- setImmediate(function () {
- _this.destroy(new Error('kaboom'));
- });
- }
- }
- });
-
- var _received = 0;
- var _err2 = null;
-
- try {
- // eslint-disable-next-line no-unused-vars
- var _iteratorNormalCompletion4 = true;
- var _didIteratorError4 = false;
-
- var _iteratorError4;
-
- try {
- for (var _iterator4 = _asyncIterator(_readable7), _step4, _value4; _step4 = yield _iterator4.next(), _iteratorNormalCompletion4 = _step4.done, _value4 = yield _step4.value, !_iteratorNormalCompletion4; _iteratorNormalCompletion4 = true) {
- var _k3 = _value4;
- _received++;
- }
- } catch (err) {
- _didIteratorError4 = true;
- _iteratorError4 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion4 && _iterator4.return != null) {
- yield _iterator4.return();
- }
- } finally {
- if (_didIteratorError4) {
- throw _iteratorError4;
- }
- }
- }
- } catch (e) {
- _err2 = e;
- }
-
- assert.strictEqual(_err2.message, 'kaboom');
- assert.strictEqual(_received, 1);
- }
- {
- console.log('destroyed by throw');
-
- var _readable8 = new Readable({
- objectMode: true,
- read: function read() {
- this.push('hello');
- }
- });
-
- var _err3 = null;
-
- try {
- var _iteratorNormalCompletion5 = true;
- var _didIteratorError5 = false;
-
- var _iteratorError5;
-
- try {
- for (var _iterator5 = _asyncIterator(_readable8), _step5, _value5; _step5 = yield _iterator5.next(), _iteratorNormalCompletion5 = _step5.done, _value5 = yield _step5.value, !_iteratorNormalCompletion5; _iteratorNormalCompletion5 = true) {
- var _k4 = _value5;
- assert.strictEqual(_k4, 'hello');
- throw new Error('kaboom');
- }
- } catch (err) {
- _didIteratorError5 = true;
- _iteratorError5 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion5 && _iterator5.return != null) {
- yield _iterator5.return();
- }
- } finally {
- if (_didIteratorError5) {
- throw _iteratorError5;
- }
- }
- }
- } catch (e) {
- _err3 = e;
- }
-
- assert.strictEqual(_err3.message, 'kaboom');
- assert.strictEqual(_readable8.destroyed, true);
- }
- {
- console.log('destroyed sync after push');
-
- var _readable9 = new Readable({
- objectMode: true,
- read: function read() {
- this.push('hello');
- this.destroy(new Error('kaboom'));
- }
- });
-
- var _received2 = 0;
- var _err4 = null;
-
- try {
- var _iteratorNormalCompletion6 = true;
- var _didIteratorError6 = false;
-
- var _iteratorError6;
-
- try {
- for (var _iterator6 = _asyncIterator(_readable9), _step6, _value6; _step6 = yield _iterator6.next(), _iteratorNormalCompletion6 = _step6.done, _value6 = yield _step6.value, !_iteratorNormalCompletion6; _iteratorNormalCompletion6 = true) {
- var _k5 = _value6;
- assert.strictEqual(_k5, 'hello');
- _received2++;
- }
- } catch (err) {
- _didIteratorError6 = true;
- _iteratorError6 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion6 && _iterator6.return != null) {
- yield _iterator6.return();
- }
- } finally {
- if (_didIteratorError6) {
- throw _iteratorError6;
- }
- }
- }
- } catch (e) {
- _err4 = e;
- }
-
- assert.strictEqual(_err4.message, 'kaboom');
- assert.strictEqual(_received2, 1);
- }
- {
- console.log('push async');
- var _max3 = 42;
- var _readed = 0;
- var _received3 = 0;
-
- var _readable10 = new Readable({
- objectMode: true,
- read: function read() {
- var _this2 = this;
-
- setImmediate(function () {
- _this2.push('hello');
-
- if (++_readed === _max3) {
- _this2.push(null);
- }
- });
- }
- });
-
- var _iteratorNormalCompletion7 = true;
- var _didIteratorError7 = false;
-
- var _iteratorError7;
-
- try {
- for (var _iterator7 = _asyncIterator(_readable10), _step7, _value7; _step7 = yield _iterator7.next(), _iteratorNormalCompletion7 = _step7.done, _value7 = yield _step7.value, !_iteratorNormalCompletion7; _iteratorNormalCompletion7 = true) {
- var _k6 = _value7;
- _received3++;
- assert.strictEqual(_k6, 'hello');
- }
- } catch (err) {
- _didIteratorError7 = true;
- _iteratorError7 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion7 && _iterator7.return != null) {
- yield _iterator7.return();
- }
- } finally {
- if (_didIteratorError7) {
- throw _iteratorError7;
- }
- }
- }
-
- assert.strictEqual(_readed, _received3);
- }
- {
- console.log('push binary async');
- var _max4 = 42;
- var _readed2 = 0;
-
- var _readable11 = new Readable({
- read: function read() {
- var _this3 = this;
-
- setImmediate(function () {
- _this3.push('hello');
-
- if (++_readed2 === _max4) {
- _this3.push(null);
- }
- });
- }
- });
-
- var expected = '';
-
- _readable11.setEncoding('utf8');
-
- _readable11.pause();
-
- _readable11.on('data', function (chunk) {
- expected += chunk;
- });
-
- var data = '';
- var _iteratorNormalCompletion8 = true;
- var _didIteratorError8 = false;
-
- var _iteratorError8;
-
- try {
- for (var _iterator8 = _asyncIterator(_readable11), _step8, _value8; _step8 = yield _iterator8.next(), _iteratorNormalCompletion8 = _step8.done, _value8 = yield _step8.value, !_iteratorNormalCompletion8; _iteratorNormalCompletion8 = true) {
- var _k7 = _value8;
- data += _k7;
- }
- } catch (err) {
- _didIteratorError8 = true;
- _iteratorError8 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion8 && _iterator8.return != null) {
- yield _iterator8.return();
- }
- } finally {
- if (_didIteratorError8) {
- throw _iteratorError8;
- }
- }
- }
-
- assert.strictEqual(data, expected);
- }
- {
- console.log('.next() on destroyed stream');
-
- var _readable12 = new Readable({
- read: function read() {// no-op
- }
- });
-
- _readable12.destroy();
-
- var _ref = yield _readable12[Symbol.asyncIterator]().next(),
- done = _ref.done;
-
- assert.strictEqual(done, true);
- }
- {
- console.log('.next() on pipelined stream');
-
- var _readable13 = new Readable({
- read: function read() {// no-op
- }
- });
-
- var passthrough = new PassThrough();
-
- var _err5 = new Error('kaboom');
-
- pipeline(_readable13, passthrough, common.mustCall(function (e) {
- assert.strictEqual(e, _err5);
- }));
-
- _readable13.destroy(_err5);
-
- try {
- yield _readable13[Symbol.asyncIterator]().next();
- } catch (e) {
- assert.strictEqual(e, _err5);
- }
- }
- {
- console.log('iterating on an ended stream completes');
- var r = new Readable({
- objectMode: true,
- read: function read() {
- this.push('asdf');
- this.push('hehe');
- this.push(null);
- }
- }); // eslint-disable-next-line no-unused-vars
-
- var _iteratorNormalCompletion9 = true;
- var _didIteratorError9 = false;
-
- var _iteratorError9;
-
- try {
- for (var _iterator9 = _asyncIterator(r), _step9, _value9; _step9 = yield _iterator9.next(), _iteratorNormalCompletion9 = _step9.done, _value9 = yield _step9.value, !_iteratorNormalCompletion9; _iteratorNormalCompletion9 = true) {
- var a = _value9;
- } // eslint-disable-next-line no-unused-vars
-
- } catch (err) {
- _didIteratorError9 = true;
- _iteratorError9 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion9 && _iterator9.return != null) {
- yield _iterator9.return();
- }
- } finally {
- if (_didIteratorError9) {
- throw _iteratorError9;
- }
- }
- }
-
- var _iteratorNormalCompletion10 = true;
- var _didIteratorError10 = false;
-
- var _iteratorError10;
-
- try {
- for (var _iterator10 = _asyncIterator(r), _step10, _value10; _step10 = yield _iterator10.next(), _iteratorNormalCompletion10 = _step10.done, _value10 = yield _step10.value, !_iteratorNormalCompletion10; _iteratorNormalCompletion10 = true) {
- var b = _value10;
- }
- } catch (err) {
- _didIteratorError10 = true;
- _iteratorError10 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion10 && _iterator10.return != null) {
- yield _iterator10.return();
- }
- } finally {
- if (_didIteratorError10) {
- throw _iteratorError10;
- }
- }
- }
- }
- {
- console.log('destroy mid-stream does not error');
-
- var _r = new Readable({
- objectMode: true,
- read: function read() {
- this.push('asdf');
- this.push('hehe');
- }
- }); // eslint-disable-next-line no-unused-vars
-
-
- var _iteratorNormalCompletion11 = true;
- var _didIteratorError11 = false;
-
- var _iteratorError11;
-
- try {
- for (var _iterator11 = _asyncIterator(_r), _step11, _value11; _step11 = yield _iterator11.next(), _iteratorNormalCompletion11 = _step11.done, _value11 = yield _step11.value, !_iteratorNormalCompletion11; _iteratorNormalCompletion11 = true) {
- var _a = _value11;
-
- _r.destroy(null);
- }
- } catch (err) {
- _didIteratorError11 = true;
- _iteratorError11 = err;
- } finally {
- try {
- if (!_iteratorNormalCompletion11 && _iterator11.return != null) {
- yield _iterator11.return();
- }
- } finally {
- if (_didIteratorError11) {
- throw _iteratorError11;
- }
- }
- }
- }
- {
- console.log('all next promises must be resolved on end');
-
- var _r2 = new Readable({
- objectMode: true,
- read: function read() {}
- });
-
- var _b = _r2[Symbol.asyncIterator]();
-
- var c = _b.next();
-
- var _d = _b.next();
-
- _r2.push(null);
-
- assert.deepStrictEqual((yield c), {
- done: true,
- value: undefined
- });
- assert.deepStrictEqual((yield _d), {
- done: true,
- value: undefined
- });
- }
- {
- console.log('all next promises must be resolved on destroy');
-
- var _r3 = new Readable({
- objectMode: true,
- read: function read() {}
- });
-
- var _b2 = _r3[Symbol.asyncIterator]();
-
- var _c = _b2.next();
-
- var _d2 = _b2.next();
-
- _r3.destroy();
-
- assert.deepStrictEqual((yield _c), {
- done: true,
- value: undefined
- });
- assert.deepStrictEqual((yield _d2), {
- done: true,
- value: undefined
- });
- }
- {
- console.log('all next promises must be resolved on destroy with error');
-
- var _r4 = new Readable({
- objectMode: true,
- read: function read() {}
- });
-
- var _b3 = _r4[Symbol.asyncIterator]();
-
- var _c2 = _b3.next();
-
- var _d3 = _b3.next();
-
- var _err6 = new Error('kaboom');
-
- _r4.destroy(_err6);
-
- yield Promise.all([_asyncToGenerator(function* () {
- var e;
-
- try {
- yield _c2;
- } catch (_e) {
- e = _e;
- }
-
- assert.strictEqual(e, _err6);
- })(), _asyncToGenerator(function* () {
- var e;
-
- try {
- yield _d3;
- } catch (_e) {
- e = _e;
- }
-
- assert.strictEqual(e, _err6);
- })()]);
- }
- });
- return _tests.apply(this, arguments);
-}
-
-tests().then(common.mustCall(), common.mustNotCall(console.log));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js
index d8f683dd2f..cd5bb31fdd 100644
--- a/test/parallel/test-stream-readable-constructor-set-methods.js
+++ b/test/parallel/test-stream-readable-constructor-set-methods.js
@@ -1,36 +1,31 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
-var _read = common.mustCall(function _read(n) {
- this.push(null);
-});
+const _read = common.mustCall(function _read(n) {
+ this.push(null)
+})
-var r = new Readable({
+const r = new Readable({
read: _read
-});
-r.resume();
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+r.resume()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-data.js b/test/parallel/test-stream-readable-data.js
new file mode 100644
index 0000000000..80db4f024c
--- /dev/null
+++ b/test/parallel/test-stream-readable-data.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const readable = new Readable({
+ read() {}
+})
+
+function read() {}
+
+readable.setEncoding('utf8')
+readable.on('readable', read)
+readable.removeListener('readable', read)
+process.nextTick(function () {
+ readable.on('data', common.mustCall())
+ readable.push('hello')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js
index 439476ab33..1823b1e651 100644
--- a/test/parallel/test-stream-readable-destroy.js
+++ b/test/parallel/test-stream-readable-destroy.js
@@ -1,226 +1,432 @@
-"use strict";
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
-var common = require('../common');
-
-var _require = require('../../'),
- Readable = _require.Readable;
-
-var assert = require('assert/');
-
-{
- var read = new Readable({
- read: function read() {}
- });
- read.resume();
- read.on('close', common.mustCall());
- read.destroy();
- assert.strictEqual(read.destroyed, true);
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
}
-{
- var _read = new Readable({
- read: function read() {}
- });
+/* replacement end */
- _read.resume();
+;('use strict')
- var expected = new Error('kaboom');
+const tap = require('tap')
- _read.on('end', common.mustNotCall('no end event'));
+const silentConsole = {
+ log() {},
- _read.on('close', common.mustCall());
+ error() {}
+}
+const common = require('../common')
- _read.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, expected);
- }));
+const { Readable, addAbortSignal } = require('../../lib/ours/index')
- _read.destroy(expected);
+const assert = require('assert')
- assert.strictEqual(_read.destroyed, true);
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.on('close', common.mustCall())
+ read.destroy()
+ assert.strictEqual(read.errored, null)
+ assert.strictEqual(read.destroyed, true)
}
{
- var _read2 = new Readable({
- read: function read() {}
- });
-
- _read2._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected);
- cb(err);
- });
-
- var _expected = new Error('kaboom');
-
- _read2.on('end', common.mustNotCall('no end event'));
-
- _read2.on('close', common.mustCall());
-
- _read2.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected);
- }));
-
- _read2.destroy(_expected);
-
- assert.strictEqual(_read2.destroyed, true);
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ const expected = new Error('kaboom')
+ read.on('end', common.mustNotCall('no end event'))
+ read.on('close', common.mustCall())
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ read.destroy(expected)
+ assert.strictEqual(read.errored, expected)
+ assert.strictEqual(read.destroyed, true)
}
{
- var _read3 = new Readable({
- read: function read() {},
- destroy: common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected2);
- cb();
+ const read = new Readable({
+ read() {}
+ })
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ })
+ const expected = new Error('kaboom')
+ read.on('end', common.mustNotCall('no end event'))
+ read.on('close', common.mustCall())
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
})
- });
-
- var _expected2 = new Error('kaboom');
-
- _read3.on('end', common.mustNotCall('no end event')); // error is swallowed by the custom _destroy
-
-
- _read3.on('error', common.mustNotCall('no error event'));
-
- _read3.on('close', common.mustCall());
-
- _read3.destroy(_expected2);
-
- assert.strictEqual(_read3.destroyed, true);
+ )
+ read.destroy(expected)
+ assert.strictEqual(read.destroyed, true)
}
{
- var _read4 = new Readable({
- read: function read() {}
- });
+ const read = new Readable({
+ read() {},
- _read4._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb();
- });
-
- _read4.destroy();
-
- assert.strictEqual(_read4.destroyed, true);
+ destroy: common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb()
+ })
+ })
+ const expected = new Error('kaboom')
+ read.on('end', common.mustNotCall('no end event')) // Error is swallowed by the custom _destroy
+
+ read.on('error', common.mustNotCall('no error event'))
+ read.on('close', common.mustCall())
+ read.destroy(expected)
+ assert.strictEqual(read.destroyed, true)
}
{
- var _read5 = new Readable({
- read: function read() {}
- });
-
- _read5.resume();
-
- _read5._destroy = common.mustCall(function (err, cb) {
- var _this = this;
-
- assert.strictEqual(err, null);
- process.nextTick(function () {
- _this.push(null);
-
- cb();
- });
- });
- var fail = common.mustNotCall('no end event');
-
- _read5.on('end', fail);
-
- _read5.on('close', common.mustCall());
-
- _read5.destroy();
-
- _read5.removeListener('end', fail);
-
- _read5.on('end', common.mustCall());
-
- assert.strictEqual(_read5.destroyed, true);
+ const read = new Readable({
+ read() {}
+ })
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ })
+ read.destroy()
+ assert.strictEqual(read.destroyed, true)
}
{
- var _read6 = new Readable({
- read: function read() {}
- });
-
- var _expected3 = new Error('kaboom');
-
- _read6._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb(_expected3);
- });
-
- _read6.on('end', common.mustNotCall('no end event'));
-
- _read6.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected3);
- }));
-
- _read6.destroy();
-
- assert.strictEqual(_read6.destroyed, true);
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.push(null)
+ cb()
+ })
+ })
+ const fail = common.mustNotCall('no end event')
+ read.on('end', fail)
+ read.on('close', common.mustCall())
+ read.destroy()
+ read.removeListener('end', fail)
+ read.on('end', common.mustNotCall())
+ assert.strictEqual(read.destroyed, true)
}
{
- var _read7 = new Readable({
- read: function read() {}
- });
-
- _read7.resume();
-
- _read7.destroyed = true;
- assert.strictEqual(_read7.destroyed, true); // the internal destroy() mechanism should not be triggered
-
- _read7.on('end', common.mustNotCall());
-
- _read7.destroy();
+ const read = new Readable({
+ read() {}
+ })
+ const expected = new Error('kaboom')
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ })
+ let ticked = false
+ read.on('end', common.mustNotCall('no end event'))
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(read._readableState.errorEmitted, true)
+ assert.strictEqual(read._readableState.errored, expected)
+ assert.strictEqual(err, expected)
+ })
+ )
+ read.destroy()
+ assert.strictEqual(read._readableState.errorEmitted, false)
+ assert.strictEqual(read._readableState.errored, expected)
+ assert.strictEqual(read.destroyed, true)
+ ticked = true
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.destroyed = true
+ assert.strictEqual(read.destroyed, true) // The internal destroy() mechanism should not be triggered
+
+ read.on('end', common.mustNotCall())
+ read.destroy()
}
{
function MyReadable() {
- assert.strictEqual(this.destroyed, false);
- this.destroyed = false;
- Readable.call(this);
+ assert.strictEqual(this.destroyed, false)
+ this.destroyed = false
+ Readable.call(this)
}
- Object.setPrototypeOf(MyReadable.prototype, Readable.prototype);
- Object.setPrototypeOf(MyReadable, Readable);
- new MyReadable();
+ Object.setPrototypeOf(MyReadable.prototype, Readable.prototype)
+ Object.setPrototypeOf(MyReadable, Readable)
+ new MyReadable()
}
{
- // destroy and destroy callback
- var _read8 = new Readable({
- read: function read() {}
- });
-
- _read8.resume();
-
- var _expected4 = new Error('kaboom');
-
- _read8.on('close', common.mustCall());
-
- _read8.destroy(_expected4, common.mustCall(function (err) {
- assert.strictEqual(err, _expected4);
- }));
+ // Destroy and destroy callback
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ const expected = new Error('kaboom')
+ let ticked = false
+ read.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(read._readableState.errorEmitted, true)
+ assert.strictEqual(ticked, true)
+ })
+ )
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ assert.strictEqual(read._readableState.errored, null)
+ assert.strictEqual(read._readableState.errorEmitted, false)
+ read.destroy(
+ expected,
+ common.mustCall(function (err) {
+ assert.strictEqual(read._readableState.errored, expected)
+ assert.strictEqual(err, expected)
+ })
+ )
+ assert.strictEqual(read._readableState.errorEmitted, false)
+ assert.strictEqual(read._readableState.errored, expected)
+ ticked = true
}
{
- var _read9 = new Readable({
- read: function read() {}
- });
-
- _read9.destroy();
-
- _read9.push('hi');
-
- _read9.on('data', common.mustNotCall());
+ const readable = new Readable({
+ destroy: common.mustCall(function (err, cb) {
+ process.nextTick(cb, new Error('kaboom 1'))
+ }),
+
+ read() {}
+ })
+ let ticked = false
+ readable.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(readable._readableState.errorEmitted, true)
+ })
+ )
+ readable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.message, 'kaboom 1')
+ assert.strictEqual(readable._readableState.errorEmitted, true)
+ })
+ )
+ readable.destroy()
+ assert.strictEqual(readable.destroyed, true)
+ assert.strictEqual(readable._readableState.errored, null)
+ assert.strictEqual(readable._readableState.errorEmitted, false) // Test case where `readable.destroy()` is called again with an error before
+ // the `_destroy()` callback is called.
+
+ readable.destroy(new Error('kaboom 2'))
+ assert.strictEqual(readable._readableState.errorEmitted, false)
+ assert.strictEqual(readable._readableState.errored, null)
+ ticked = true
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.destroy()
+ read.push('hi')
+ read.on('data', common.mustNotCall())
+}
+{
+ const read = new Readable({
+ read: common.mustNotCall(function () {})
+ })
+ read.destroy()
+ assert.strictEqual(read.destroyed, true)
+ read.read()
+}
+{
+ const read = new Readable({
+ autoDestroy: false,
+
+ read() {
+ this.push(null)
+ this.push('asd')
+ }
+ })
+ read.on(
+ 'error',
+ common.mustCall(() => {
+ assert(read._readableState.errored)
+ })
+ )
+ read.resume()
+}
+{
+ const controller = new AbortController()
+ const read = addAbortSignal(
+ controller.signal,
+ new Readable({
+ read() {
+ this.push('asd')
+ }
+ })
+ )
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ controller.abort()
+ read.on('data', common.mustNotCall())
+}
+{
+ const controller = new AbortController()
+ const read = new Readable({
+ signal: controller.signal,
+
+ read() {
+ this.push('asd')
+ }
+ })
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ controller.abort()
+ read.on('data', common.mustNotCall())
+}
+{
+ const controller = new AbortController()
+ const read = addAbortSignal(
+ controller.signal,
+ new Readable({
+ objectMode: true,
+
+ read() {
+ return false
+ }
+ })
+ )
+ read.push('asd')
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ assert.rejects(
+ (async () => {
+ // eslint-disable-next-line no-unused-vars, no-empty
+ for await (const chunk of read) {
+ }
+ })(),
+ /AbortError/
+ )
+ setTimeout(() => controller.abort(), 0)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.read()
+ })
+ )
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.read()
+ })
+ )
+ read.destroy(new Error('asd'))
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.read()
+ })
+ )
+ read.destroy()
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.unshift('asd')
+ })
+ )
+ read.destroy()
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.destroy()
+ read.unshift('asd')
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ })
+ )
+ read.destroy()
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.destroy()
+ read.push('asd')
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-didRead.js b/test/parallel/test-stream-readable-didRead.js
new file mode 100644
index 0000000000..96266e1f27
--- /dev/null
+++ b/test/parallel/test-stream-readable-didRead.js
@@ -0,0 +1,131 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { isDisturbed, isErrored, Readable } = require('../../lib/ours/index')
+
+function noop() {}
+
+function check(readable, data, fn) {
+ assert.strictEqual(readable.readableDidRead, false)
+ assert.strictEqual(isDisturbed(readable), false)
+ assert.strictEqual(isErrored(readable), false)
+
+ if (data === -1) {
+ readable.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(isErrored(readable), true)
+ })
+ )
+ readable.on('data', common.mustNotCall())
+ readable.on('end', common.mustNotCall())
+ } else {
+ readable.on('error', common.mustNotCall())
+
+ if (data === -2) {
+ readable.on('end', common.mustNotCall())
+ } else {
+ readable.on('end', common.mustCall())
+ }
+
+ if (data > 0) {
+ readable.on('data', common.mustCallAtLeast(data))
+ } else {
+ readable.on('data', common.mustNotCall())
+ }
+ }
+
+ readable.on('close', common.mustCall())
+ fn()
+ setImmediate(() => {
+ assert.strictEqual(readable.readableDidRead, data > 0)
+
+ if (data > 0) {
+ assert.strictEqual(isDisturbed(readable), true)
+ }
+ })
+}
+
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, 0, () => {
+ readable.read()
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, 0, () => {
+ readable.resume()
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, -2, () => {
+ readable.destroy()
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, -1, () => {
+ readable.destroy(new Error())
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push('data')
+ this.push(null)
+ }
+ })
+ check(readable, 1, () => {
+ readable.on('data', noop)
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push('data')
+ this.push(null)
+ }
+ })
+ check(readable, 1, () => {
+ readable.on('data', noop)
+ readable.off('data', noop)
+ })
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-emit-readable-short-stream.js b/test/parallel/test-stream-readable-emit-readable-short-stream.js
new file mode 100644
index 0000000000..72834baa01
--- /dev/null
+++ b/test/parallel/test-stream-readable-emit-readable-short-stream.js
@@ -0,0 +1,157 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+{
+ const r = new stream.Readable({
+ read: common.mustCall(function () {
+ this.push('content')
+ this.push(null)
+ })
+ })
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ r.pipe(t)
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ }, 2)
+ )
+}
+{
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ t.end('content')
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ })
+ )
+}
+{
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ t.write('content')
+ t.end()
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ })
+ )
+}
+{
+ const t = new stream.Readable({
+ read() {}
+ })
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ })
+ )
+ t.push('content')
+ t.push(null)
+}
+{
+ const t = new stream.Readable({
+ read() {}
+ })
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ }, 2)
+ )
+ process.nextTick(() => {
+ t.push('content')
+ t.push(null)
+ })
+}
+{
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ }, 2)
+ )
+ t.write('content')
+ t.end()
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-emittedReadable.js b/test/parallel/test-stream-readable-emittedReadable.js
index d08ffce14b..34970f6e88 100644
--- a/test/parallel/test-stream-readable-emittedReadable.js
+++ b/test/parallel/test-stream-readable-emittedReadable.js
@@ -1,87 +1,101 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
-var readable = new Readable({
- read: function read() {}
-}); // Initialized to false.
+const readable = new Readable({
+ read: () => {}
+}) // Initialized to false.
-assert.strictEqual(readable._readableState.emittedReadable, false);
-var expected = [bufferShim.from('foobar'), bufferShim.from('quo'), null];
-readable.on('readable', common.mustCall(function () {
- // emittedReadable should be true when the readable event is emitted
- assert.strictEqual(readable._readableState.emittedReadable, true);
- assert.deepStrictEqual(readable.read(), expected.shift()); // emittedReadable is reset to false during read()
+assert.strictEqual(readable._readableState.emittedReadable, false)
+const expected = [Buffer.from('foobar'), Buffer.from('quo'), null]
+readable.on(
+ 'readable',
+ common.mustCall(() => {
+ // emittedReadable should be true when the readable event is emitted
+ assert.strictEqual(readable._readableState.emittedReadable, true)
+ assert.deepStrictEqual(readable.read(), expected.shift()) // emittedReadable is reset to false during read()
- assert.strictEqual(readable._readableState.emittedReadable, false);
-}, 3)); // When the first readable listener is just attached,
+ assert.strictEqual(readable._readableState.emittedReadable, false)
+ }, 3)
+) // When the first readable listener is just attached,
// emittedReadable should be false
-assert.strictEqual(readable._readableState.emittedReadable, false); // These trigger a single 'readable', as things are batched up
-
-process.nextTick(common.mustCall(function () {
- readable.push('foo');
-}));
-process.nextTick(common.mustCall(function () {
- readable.push('bar');
-})); // these triggers two readable events
-
-setImmediate(common.mustCall(function () {
- readable.push('quo');
- process.nextTick(common.mustCall(function () {
- readable.push(null);
- }));
-}));
-var noRead = new Readable({
- read: function read() {}
-});
-noRead.on('readable', common.mustCall(function () {
- // emittedReadable should be true when the readable event is emitted
- assert.strictEqual(noRead._readableState.emittedReadable, true);
- noRead.read(0); // emittedReadable is not reset during read(0)
-
- assert.strictEqual(noRead._readableState.emittedReadable, true);
-}));
-noRead.push('foo');
-noRead.push(null);
-var flowing = new Readable({
- read: function read() {}
-});
-flowing.on('data', common.mustCall(function () {
- // When in flowing mode, emittedReadable is always false.
- assert.strictEqual(flowing._readableState.emittedReadable, false);
- flowing.read();
- assert.strictEqual(flowing._readableState.emittedReadable, false);
-}, 3));
-flowing.push('foooo');
-flowing.push('bar');
-flowing.push('quo');
-process.nextTick(common.mustCall(function () {
- flowing.push(null);
-}));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+assert.strictEqual(readable._readableState.emittedReadable, false) // These trigger a single 'readable', as things are batched up
+
+process.nextTick(
+ common.mustCall(() => {
+ readable.push('foo')
+ })
+)
+process.nextTick(
+ common.mustCall(() => {
+ readable.push('bar')
+ })
+) // These trigger two readable events
+
+setImmediate(
+ common.mustCall(() => {
+ readable.push('quo')
+ process.nextTick(
+ common.mustCall(() => {
+ readable.push(null)
+ })
+ )
+ })
+)
+const noRead = new Readable({
+ read: () => {}
+})
+noRead.on(
+ 'readable',
+ common.mustCall(() => {
+ // emittedReadable should be true when the readable event is emitted
+ assert.strictEqual(noRead._readableState.emittedReadable, true)
+ noRead.read(0) // emittedReadable is not reset during read(0)
+
+ assert.strictEqual(noRead._readableState.emittedReadable, true)
+ })
+)
+noRead.push('foo')
+noRead.push(null)
+const flowing = new Readable({
+ read: () => {}
+})
+flowing.on(
+ 'data',
+ common.mustCall(() => {
+ // When in flowing mode, emittedReadable is always false.
+ assert.strictEqual(flowing._readableState.emittedReadable, false)
+ flowing.read()
+ assert.strictEqual(flowing._readableState.emittedReadable, false)
+ }, 3)
+)
+flowing.push('foooo')
+flowing.push('bar')
+flowing.push('quo')
+process.nextTick(
+ common.mustCall(() => {
+ flowing.push(null)
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-end-destroyed.js b/test/parallel/test-stream-readable-end-destroyed.js
new file mode 100644
index 0000000000..9946d8db00
--- /dev/null
+++ b/test/parallel/test-stream-readable-end-destroyed.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+{
+ // Don't emit 'end' after 'close'.
+ const r = new Readable()
+ r.on('end', common.mustNotCall())
+ r.resume()
+ r.destroy()
+ r.on(
+ 'close',
+ common.mustCall(() => {
+ r.push(null)
+ })
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-ended.js b/test/parallel/test-stream-readable-ended.js
new file mode 100644
index 0000000000..d9b1c36696
--- /dev/null
+++ b/test/parallel/test-stream-readable-ended.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const assert = require('assert') // basic
+
+{
+ // Find it on Readable.prototype
+ assert(Reflect.has(Readable.prototype, 'readableEnded'))
+} // event
+
+{
+ const readable = new Readable()
+
+ readable._read = () => {
+ // The state ended should start in false.
+ assert.strictEqual(readable.readableEnded, false)
+ readable.push('asd')
+ assert.strictEqual(readable.readableEnded, false)
+ readable.push(null)
+ assert.strictEqual(readable.readableEnded, false)
+ }
+
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(readable.readableEnded, true)
+ })
+ )
+ readable.on(
+ 'data',
+ common.mustCall(() => {
+ assert.strictEqual(readable.readableEnded, false)
+ })
+ )
+} // Verifies no `error` triggered on multiple .push(null) invocations
+
+{
+ const readable = new Readable()
+ readable.on('readable', () => {
+ readable.read()
+ })
+ readable.on('error', common.mustNotCall())
+ readable.on('end', common.mustCall())
+ readable.push('a')
+ readable.push(null)
+ readable.push(null)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-error-end.js b/test/parallel/test-stream-readable-error-end.js
new file mode 100644
index 0000000000..a914d88749
--- /dev/null
+++ b/test/parallel/test-stream-readable-error-end.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+{
+ const r = new Readable({
+ read() {}
+ })
+ r.on('end', common.mustNotCall())
+ r.on('data', common.mustCall())
+ r.on('error', common.mustCall())
+ r.push('asd')
+ r.push(null)
+ r.destroy(new Error('kaboom'))
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js
index 1b33347760..5e4b402d9b 100644
--- a/test/parallel/test-stream-readable-event.js
+++ b/test/parallel/test-stream-readable-event.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,133 +18,117 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
{
// First test, not reading when the readable is added.
// make sure that on('readable', ...) triggers a readable event.
- var r = new Readable({
+ const r = new Readable({
highWaterMark: 3
- });
- r._read = common.mustNotCall(); // This triggers a 'readable' event, which is lost.
+ })
+ r._read = common.mustNotCall() // This triggers a 'readable' event, which is lost.
- r.push(bufferShim.from('blerg'));
+ r.push(Buffer.from('blerg'))
setTimeout(function () {
- // we're testing what we think we are
- assert(!r._readableState.reading);
- r.on('readable', common.mustCall());
- }, 1);
+ // We're testing what we think we are
+ assert(!r._readableState.reading)
+ r.on('readable', common.mustCall())
+ }, 1)
}
{
- // second test, make sure that readable is re-emitted if there's
+ // Second test, make sure that readable is re-emitted if there's
// already a length, while it IS reading.
- var _r = new Readable({
+ const r = new Readable({
highWaterMark: 3
- });
-
- _r._read = common.mustCall(); // This triggers a 'readable' event, which is lost.
-
- _r.push(bufferShim.from('bl'));
+ })
+ r._read = common.mustCall() // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('bl'))
setTimeout(function () {
- // assert we're testing what we think we are
- assert(_r._readableState.reading);
-
- _r.on('readable', common.mustCall());
- }, 1);
+ // Assert we're testing what we think we are
+ assert(r._readableState.reading)
+ r.on('readable', common.mustCall())
+ }, 1)
}
{
// Third test, not reading when the stream has not passed
// the highWaterMark but *has* reached EOF.
- var _r2 = new Readable({
+ const r = new Readable({
highWaterMark: 30
- });
-
- _r2._read = common.mustNotCall(); // This triggers a 'readable' event, which is lost.
-
- _r2.push(bufferShim.from('blerg'));
-
- _r2.push(null);
+ })
+ r._read = common.mustNotCall() // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+ r.push(null)
setTimeout(function () {
- // assert we're testing what we think we are
- assert(!_r2._readableState.reading);
-
- _r2.on('readable', common.mustCall());
- }, 1);
+ // Assert we're testing what we think we are
+ assert(!r._readableState.reading)
+ r.on('readable', common.mustCall())
+ }, 1)
}
{
- // pushing a empty string in non-objectMode should
+ // Pushing an empty string in non-objectMode should
// trigger next `read()`.
- var underlyingData = ['', 'x', 'y', '', 'z'];
- var expected = underlyingData.filter(function (data) {
- return data;
- });
- var result = [];
-
- var _r3 = new Readable({
+ const underlyingData = ['', 'x', 'y', '', 'z']
+ const expected = underlyingData.filter((data) => data)
+ const result = []
+ const r = new Readable({
encoding: 'utf8'
- });
-
- _r3._read = function () {
- var _this = this;
+ })
- process.nextTick(function () {
+ r._read = function () {
+ process.nextTick(() => {
if (!underlyingData.length) {
- _this.push(null);
+ this.push(null)
} else {
- _this.push(underlyingData.shift());
+ this.push(underlyingData.shift())
}
- });
- };
-
- _r3.on('readable', function () {
- var data = _r3.read();
-
- if (data !== null) result.push(data);
- });
-
- _r3.on('end', common.mustCall(function () {
- assert.deepStrictEqual(result, expected);
- }));
+ })
+ }
+
+ r.on('readable', () => {
+ const data = r.read()
+ if (data !== null) result.push(data)
+ })
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ assert.deepStrictEqual(result, expected)
+ })
+ )
}
{
// #20923
- var _r4 = new Readable();
+ const r = new Readable()
- _r4._read = function () {// actually doing thing here
- };
+ r._read = function () {
+ // Intentionally doing nothing here
+ }
- _r4.on('data', function () {});
-
- _r4.removeAllListeners();
-
- assert.strictEqual(_r4.eventNames().length, 0);
+ r.on('data', function () {})
+ r.removeAllListeners()
+ assert.strictEqual(r.eventNames().length, 0)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js
index ed98bd3244..e4658af004 100644
--- a/test/parallel/test-stream-readable-flow-recursion.js
+++ b/test/parallel/test-stream-readable-flow-recursion.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,75 +18,70 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/'); // this test verifies that passing a huge number to read(size)
+const assert = require('assert') // This test verifies that passing a huge number to read(size)
// will push up the highWaterMark, and cause the stream to read
// more data continuously, but without triggering a nextTick
// warning or RangeError.
+const Readable = require('../../lib/ours/index').Readable // Throw an error if we trigger a nextTick warning.
-var Readable = require('../../').Readable; // throw an error if we trigger a nextTick warning.
-
-
-process.throwDeprecation = true;
-var stream = new Readable({
+process.throwDeprecation = true
+const stream = new Readable({
highWaterMark: 2
-});
-var reads = 0;
-var total = 5000;
+})
+let reads = 0
+let total = 5000
stream._read = function (size) {
- reads++;
- size = Math.min(size, total);
- total -= size;
- if (size === 0) stream.push(null);else stream.push(bufferShim.allocUnsafe(size));
-};
+ reads++
+ size = Math.min(size, total)
+ total -= size
+ if (size === 0) stream.push(null)
+ else stream.push(Buffer.allocUnsafe(size))
+}
-var depth = 0;
+let depth = 0
function flow(stream, size, callback) {
- depth += 1;
- var chunk = stream.read(size);
- if (!chunk) stream.once('readable', flow.bind(null, stream, size, callback));else callback(chunk);
- depth -= 1;
- console.log("flow(".concat(depth, "): exit"));
+ depth += 1
+ const chunk = stream.read(size)
+ if (!chunk) stream.once('readable', flow.bind(null, stream, size, callback))
+ else callback(chunk)
+ depth -= 1
+ silentConsole.log(`flow(${depth}): exit`)
}
flow(stream, 5000, function () {
- console.log("complete (".concat(depth, ")"));
-});
+ silentConsole.log(`complete (${depth})`)
+})
process.on('exit', function (code) {
- assert.strictEqual(reads, 2); // we pushed up the high water mark
-
- assert.strictEqual(stream.readableHighWaterMark, 8192); // length is 0 right now, because we pulled it all out.
-
- assert.strictEqual(stream.readableLength, 0);
- assert(!code);
- assert.strictEqual(depth, 0);
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(reads, 2) // We pushed up the high water mark
+
+ assert.strictEqual(stream.readableHighWaterMark, 8192) // Length is 0 right now, because we pulled it all out.
+
+ assert.strictEqual(stream.readableLength, 0)
+ assert(!code)
+ assert.strictEqual(depth, 0)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-hwm-0-async.js b/test/parallel/test-stream-readable-hwm-0-async.js
index 5b4d102f52..b683b3b407 100644
--- a/test/parallel/test-stream-readable-hwm-0-async.js
+++ b/test/parallel/test-stream-readable-hwm-0-async.js
@@ -1,44 +1,40 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common'); // This test ensures that Readable stream will continue to call _read
+ error() {}
+}
+const common = require('../common') // This test ensures that Readable stream will continue to call _read
// for streams with highWaterMark === 0 once the stream returns data
// by calling push() asynchronously.
+const { Readable } = require('../../lib/ours/index')
-var _require = require('../../'),
- Readable = _require.Readable;
-
-var count = 5;
-var r = new Readable({
+let count = 5
+const r = new Readable({
// Called 6 times: First 5 return data, last one signals end of stream.
- read: common.mustCall(function () {
- process.nextTick(common.mustCall(function () {
- if (count--) r.push('a');else r.push(null);
- }));
+ read: common.mustCall(() => {
+ process.nextTick(
+ common.mustCall(() => {
+ if (count--) r.push('a')
+ else r.push(null)
+ })
+ )
}, 6),
highWaterMark: 0
-});
-r.on('end', common.mustCall());
-r.on('data', common.mustCall(5));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+r.on('end', common.mustCall())
+r.on('data', common.mustCall(5))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js
index 13f7ec3e49..c26401b35b 100644
--- a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js
+++ b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js
@@ -1,63 +1,72 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common'); // Ensure that subscribing the 'data' event will not make the stream flow.
+ error() {}
+}
+const common = require('../common') // Ensure that subscribing the 'data' event will not make the stream flow.
// The 'data' event will require calling read() by hand.
//
// The test is written for the (somewhat rare) highWaterMark: 0 streams to
// specifically catch any regressions that might occur with these streams.
+const assert = require('assert')
-var assert = require('assert/');
+const { Readable } = require('../../lib/ours/index')
-var _require = require('../../'),
- Readable = _require.Readable;
+const streamData = ['a', null] // Track the calls so we can assert their order later.
-var streamData = ['a', null]; // Track the calls so we can assert their order later.
-
-var calls = [];
-var r = new Readable({
- read: common.mustCall(function () {
- calls.push('_read:' + streamData[0]);
- process.nextTick(function () {
- calls.push('push:' + streamData[0]);
- r.push(streamData.shift());
- });
+const calls = []
+const r = new Readable({
+ read: common.mustCall(() => {
+ calls.push('_read:' + streamData[0])
+ process.nextTick(() => {
+ calls.push('push:' + streamData[0])
+ r.push(streamData.shift())
+ })
}, streamData.length),
highWaterMark: 0,
// Object mode is used here just for testing convenience. It really
// shouldn't affect the order of events. Just the data and its format.
objectMode: true
-});
-assert.strictEqual(r.readableFlowing, null);
-r.on('readable', common.mustCall(function () {
- calls.push('readable');
-}, 2));
-assert.strictEqual(r.readableFlowing, false);
-r.on('data', common.mustCall(function (data) {
- calls.push('data:' + data);
-}, 1));
-r.on('end', common.mustCall(function () {
- calls.push('end');
-}));
-assert.strictEqual(r.readableFlowing, false); // The stream emits the events asynchronously but that's not guaranteed to
+})
+assert.strictEqual(r.readableFlowing, null)
+r.on(
+ 'readable',
+ common.mustCall(() => {
+ calls.push('readable')
+ }, 2)
+)
+assert.strictEqual(r.readableFlowing, false)
+r.on(
+ 'data',
+ common.mustCall((data) => {
+ calls.push('data:' + data)
+ }, 1)
+)
+r.on(
+ 'end',
+ common.mustCall(() => {
+ calls.push('end')
+ })
+)
+assert.strictEqual(r.readableFlowing, false) // The stream emits the events asynchronously but that's not guaranteed to
// happen on the next tick (especially since the _read implementation above
// uses process.nextTick).
//
// We use setImmediate here to give the stream enough time to emit all the
// events it's about to emit.
-setImmediate(function () {
+setImmediate(() => {
// Only the _read, push, readable calls have happened. No data must be
// emitted yet.
- assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']); // Calling 'r.read()' should trigger the data event.
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']) // Calling 'r.read()' should trigger the data event.
- assert.strictEqual(r.read(), 'a');
- assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a']); // The next 'read()' will return null because hwm: 0 does not buffer any
+ assert.strictEqual(r.read(), 'a')
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a']) // The next 'read()' will return null because hwm: 0 does not buffer any
// data and the _read implementation above does the push() asynchronously.
//
// Note: This 'null' signals "no data available". It isn't the end-of-stream
@@ -67,40 +76,42 @@ setImmediate(function () {
// Using setImmediate again to give the stream enough time to emit all the
// events it wants to emit.
- assert.strictEqual(r.read(), null);
- setImmediate(function () {
+ assert.strictEqual(r.read(), null)
+ setImmediate(() => {
// There's a new 'readable' event after the data has been pushed.
// The 'end' event will be emitted only after a 'read()'.
//
// This is somewhat special for the case where the '_read' implementation
// calls 'push' asynchronously. If 'push' was synchronous, the 'end' event
// would be emitted here _before_ we call read().
- assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable']);
- assert.strictEqual(r.read(), null); // While it isn't really specified whether the 'end' event should happen
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable'])
+ assert.strictEqual(r.read(), null) // While it isn't really specified whether the 'end' event should happen
// synchronously with read() or not, we'll assert the current behavior
// ('end' event happening on the next tick after read()) so any changes
// to it are noted and acknowledged in the future.
- assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable']);
- process.nextTick(function () {
- assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable', 'end']);
- });
- });
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable'])
+ process.nextTick(() => {
+ assert.deepStrictEqual(calls, [
+ '_read:a',
+ 'push:a',
+ 'readable',
+ 'data:a',
+ '_read:null',
+ 'push:null',
+ 'readable',
+ 'end'
+ ])
+ })
+ })
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-hwm-0.js b/test/parallel/test-stream-readable-hwm-0.js
index c2752273d8..196cfec35b 100644
--- a/test/parallel/test-stream-readable-hwm-0.js
+++ b/test/parallel/test-stream-readable-hwm-0.js
@@ -1,52 +1,48 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common'); // This test ensures that Readable stream will call _read() for streams
+ error() {}
+}
+const common = require('../common') // This test ensures that Readable stream will call _read() for streams
// with highWaterMark === 0 upon .read(0) instead of just trying to
// emit 'readable' event.
+const assert = require('assert')
-var assert = require('assert/');
+const { Readable } = require('../../lib/ours/index')
-var _require = require('../../'),
- Readable = _require.Readable;
-
-var r = new Readable({
- // must be called only once upon setting 'readable' listener
+const r = new Readable({
+ // Must be called only once upon setting 'readable' listener
read: common.mustCall(),
highWaterMark: 0
-});
-var pushedNull = false; // this will trigger read(0) but must only be called after push(null)
+})
+let pushedNull = false // This will trigger read(0) but must only be called after push(null)
// because the we haven't pushed any data
-r.on('readable', common.mustCall(function () {
- assert.strictEqual(r.read(), null);
- assert.strictEqual(pushedNull, true);
-}));
-r.on('end', common.mustCall());
-process.nextTick(function () {
- assert.strictEqual(r.read(), null);
- pushedNull = true;
- r.push(null);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+r.on(
+ 'readable',
+ common.mustCall(() => {
+ assert.strictEqual(r.read(), null)
+ assert.strictEqual(pushedNull, true)
+ })
+)
+r.on('end', common.mustCall())
+process.nextTick(() => {
+ assert.strictEqual(r.read(), null)
+ pushedNull = true
+ r.push(null)
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js
index f51c0d218c..569e8ee316 100644
--- a/test/parallel/test-stream-readable-infinite-read.js
+++ b/test/parallel/test-stream-readable-infinite-read.js
@@ -1,54 +1,51 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable;
+const { Readable } = require('../../lib/ours/index')
-var buf = bufferShim.alloc(8192);
-var readable = new Readable({
+const buf = Buffer.alloc(8192)
+const readable = new Readable({
read: common.mustCall(function () {
- this.push(buf);
+ this.push(buf)
}, 31)
-});
-var i = 0;
-readable.on('readable', common.mustCall(function () {
- if (i++ === 10) {
- // We will just terminate now.
- process.removeAllListeners('readable');
- return;
- }
-
- var data = readable.read(); // TODO(mcollina): there is something odd in the highWaterMark logic
- // investigate.
-
- if (i === 1) {
- assert.strictEqual(data.length, 8192 * 2);
+})
+let i = 0
+readable.on(
+ 'readable',
+ common.mustCall(function () {
+ if (i++ === 10) {
+ // We will just terminate now.
+ process.removeAllListeners('readable')
+ return
+ }
+
+ const data = readable.read() // TODO(mcollina): there is something odd in the highWaterMark logic
+ // investigate.
+
+ if (i === 1) {
+ assert.strictEqual(data.length, 8192 * 2)
+ } else {
+ assert.strictEqual(data.length, 8192 * 3)
+ }
+ }, 11)
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
} else {
- assert.strictEqual(data.length, 8192 * 3);
+ tap.fail(`test failed - exited code ${code}`)
}
-}, 11));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-invalid-chunk.js b/test/parallel/test-stream-readable-invalid-chunk.js
index 0512890b50..a622105435 100644
--- a/test/parallel/test-stream-readable-invalid-chunk.js
+++ b/test/parallel/test-stream-readable-invalid-chunk.js
@@ -1,48 +1,58 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
-
-var stream = require('../../');
-
-var readable = new stream.Readable({
- read: function read() {}
-});
-
-function checkError(fn) {
- common.expectsError(fn, {
- code: 'ERR_INVALID_ARG_TYPE',
- type: TypeError
- });
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+function testPushArg(val) {
+ const readable = new stream.Readable({
+ read: () => {}
+ })
+ readable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_INVALID_ARG_TYPE',
+ name: 'TypeError'
+ })
+ )
+ readable.push(val)
}
-checkError(function () {
- return readable.push([]);
-});
-checkError(function () {
- return readable.push({});
-});
-checkError(function () {
- return readable.push(0);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+testPushArg([])
+testPushArg({})
+testPushArg(0)
+
+function testUnshiftArg(val) {
+ const readable = new stream.Readable({
+ read: () => {}
+ })
+ readable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_INVALID_ARG_TYPE',
+ name: 'TypeError'
+ })
+ )
+ readable.unshift(val)
+}
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+testUnshiftArg([])
+testUnshiftArg({})
+testUnshiftArg(0)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js
index df882d2b4b..0a1fd7ce9b 100644
--- a/test/parallel/test-stream-readable-needReadable.js
+++ b/test/parallel/test-stream-readable-needReadable.js
@@ -1,108 +1,137 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var Readable = require('../../').Readable;
-
-var readable = new Readable({
- read: function read() {}
-}); // Initialized to false.
-
-assert.strictEqual(readable._readableState.needReadable, false);
-readable.on('readable', common.mustCall(function () {
- // When the readable event fires, needReadable is reset.
- assert.strictEqual(readable._readableState.needReadable, false);
- readable.read();
-})); // If a readable listener is attached, then a readable event is needed.
-
-assert.strictEqual(readable._readableState.needReadable, true);
-readable.push('foo');
-readable.push(null);
-readable.on('end', common.mustCall(function () {
- // No need to emit readable anymore when the stream ends.
- assert.strictEqual(readable._readableState.needReadable, false);
-}));
-var asyncReadable = new Readable({
- read: function read() {}
-});
-asyncReadable.on('readable', common.mustCall(function () {
- if (asyncReadable.read() !== null) {
- // After each read(), the buffer is empty.
- // If the stream doesn't end now,
- // then we need to notify the reader on future changes.
- assert.strictEqual(asyncReadable._readableState.needReadable, true);
- }
-}, 2));
-process.nextTick(common.mustCall(function () {
- asyncReadable.push('foooo');
-}));
-process.nextTick(common.mustCall(function () {
- asyncReadable.push('bar');
-}));
-setImmediate(common.mustCall(function () {
- asyncReadable.push(null);
- assert.strictEqual(asyncReadable._readableState.needReadable, false);
-}));
-var flowing = new Readable({
- read: function read() {}
-}); // Notice this must be above the on('data') call.
-
-flowing.push('foooo');
-flowing.push('bar');
-flowing.push('quo');
-process.nextTick(common.mustCall(function () {
- flowing.push(null);
-})); // When the buffer already has enough data, and the stream is
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const Readable = require('../../lib/ours/index').Readable
+
+const readable = new Readable({
+ read: () => {}
+}) // Initialized to false.
+
+assert.strictEqual(readable._readableState.needReadable, false)
+readable.on(
+ 'readable',
+ common.mustCall(() => {
+ // When the readable event fires, needReadable is reset.
+ assert.strictEqual(readable._readableState.needReadable, false)
+ readable.read()
+ })
+) // If a readable listener is attached, then a readable event is needed.
+
+assert.strictEqual(readable._readableState.needReadable, true)
+readable.push('foo')
+readable.push(null)
+readable.on(
+ 'end',
+ common.mustCall(() => {
+ // No need to emit readable anymore when the stream ends.
+ assert.strictEqual(readable._readableState.needReadable, false)
+ })
+)
+const asyncReadable = new Readable({
+ read: () => {}
+})
+asyncReadable.on(
+ 'readable',
+ common.mustCall(() => {
+ if (asyncReadable.read() !== null) {
+ // After each read(), the buffer is empty.
+ // If the stream doesn't end now,
+ // then we need to notify the reader on future changes.
+ assert.strictEqual(asyncReadable._readableState.needReadable, true)
+ }
+ }, 2)
+)
+process.nextTick(
+ common.mustCall(() => {
+ asyncReadable.push('foooo')
+ })
+)
+process.nextTick(
+ common.mustCall(() => {
+ asyncReadable.push('bar')
+ })
+)
+setImmediate(
+ common.mustCall(() => {
+ asyncReadable.push(null)
+ assert.strictEqual(asyncReadable._readableState.needReadable, false)
+ })
+)
+const flowing = new Readable({
+ read: () => {}
+}) // Notice this must be above the on('data') call.
+
+flowing.push('foooo')
+flowing.push('bar')
+flowing.push('quo')
+process.nextTick(
+ common.mustCall(() => {
+ flowing.push(null)
+ })
+) // When the buffer already has enough data, and the stream is
// in flowing mode, there is no need for the readable event.
-flowing.on('data', common.mustCall(function (data) {
- assert.strictEqual(flowing._readableState.needReadable, false);
-}, 3));
-var slowProducer = new Readable({
- read: function read() {}
-});
-slowProducer.on('readable', common.mustCall(function () {
- if (slowProducer.read(8) === null) {
- // The buffer doesn't have enough data, and the stream is not need,
- // we need to notify the reader when data arrives.
- assert.strictEqual(slowProducer._readableState.needReadable, true);
+flowing.on(
+ 'data',
+ common.mustCall(function (data) {
+ assert.strictEqual(flowing._readableState.needReadable, false)
+ }, 3)
+)
+const slowProducer = new Readable({
+ read: () => {}
+})
+slowProducer.on(
+ 'readable',
+ common.mustCall(() => {
+ const chunk = slowProducer.read(8)
+ const state = slowProducer._readableState
+
+ if (chunk === null) {
+ // The buffer doesn't have enough data, and the stream has not ended, so
+ // we need to notify the reader when data arrives.
+ assert.strictEqual(state.needReadable, true)
+ } else {
+ assert.strictEqual(state.needReadable, false)
+ }
+ }, 4)
+)
+process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push('foo')
+ process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push('foo')
+ process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push('foo')
+ process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push(null)
+ })
+ )
+ })
+ )
+ })
+ )
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
} else {
- assert.strictEqual(slowProducer._readableState.needReadable, false);
+ tap.fail(`test failed - exited code ${code}`)
}
-}, 4));
-process.nextTick(common.mustCall(function () {
- slowProducer.push('foo');
- process.nextTick(common.mustCall(function () {
- slowProducer.push('foo');
- process.nextTick(common.mustCall(function () {
- slowProducer.push('foo');
- process.nextTick(common.mustCall(function () {
- slowProducer.push(null);
- }));
- }));
- }));
-}));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-next-no-null.js b/test/parallel/test-stream-readable-next-no-null.js
new file mode 100644
index 0000000000..585f340697
--- /dev/null
+++ b/test/parallel/test-stream-readable-next-no-null.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const { mustNotCall, expectsError } = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+async function* generate() {
+ yield null
+}
+
+const stream = Readable.from(generate())
+stream.on(
+ 'error',
+ expectsError({
+ code: 'ERR_STREAM_NULL_VALUES',
+ name: 'TypeError',
+ message: 'May not write null values to stream'
+ })
+)
+stream.on(
+ 'data',
+ mustNotCall((chunk) => {})
+)
+stream.on('end', mustNotCall())
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-no-unneeded-readable.js b/test/parallel/test-stream-readable-no-unneeded-readable.js
index 2da85fb9c9..89235fa261 100644
--- a/test/parallel/test-stream-readable-no-unneeded-readable.js
+++ b/test/parallel/test-stream-readable-no-unneeded-readable.js
@@ -1,87 +1,77 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Readable = _require.Readable,
- PassThrough = _require.PassThrough;
+const { Readable, PassThrough } = require('../../lib/ours/index')
function test(r) {
- var wrapper = new Readable({
- read: function read() {
- var data = r.read();
+ const wrapper = new Readable({
+ read: () => {
+ let data = r.read()
if (data) {
- wrapper.push(data);
- return;
+ wrapper.push(data)
+ return
}
r.once('readable', function () {
- data = r.read();
+ data = r.read()
if (data) {
- wrapper.push(data);
- } // else the end event should fire
-
- });
+ wrapper.push(data)
+ } // else: the end event should fire
+ })
}
- });
+ })
r.once('end', function () {
- wrapper.push(null);
- });
- wrapper.resume();
- wrapper.once('end', common.mustCall());
+ wrapper.push(null)
+ })
+ wrapper.resume()
+ wrapper.once('end', common.mustCall())
}
{
- var source = new Readable({
- read: function read() {}
- });
- source.push('foo');
- source.push('bar');
- source.push(null);
- var pt = source.pipe(new PassThrough());
- test(pt);
+ const source = new Readable({
+ read: () => {}
+ })
+ source.push('foo')
+ source.push('bar')
+ source.push(null)
+ const pt = source.pipe(new PassThrough())
+ test(pt)
}
{
// This is the underlying cause of the above test case.
- var pushChunks = ['foo', 'bar'];
- var r = new Readable({
- read: function read() {
- var chunk = pushChunks.shift();
+ const pushChunks = ['foo', 'bar']
+ const r = new Readable({
+ read: () => {
+ const chunk = pushChunks.shift()
if (chunk) {
// synchronous call
- r.push(chunk);
+ r.push(chunk)
} else {
// asynchronous call
- process.nextTick(function () {
- return r.push(null);
- });
+ process.nextTick(() => r.push(null))
}
}
- });
- test(r);
+ })
+ test(r)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js
index 8aecbf1546..ff8eb0a837 100644
--- a/test/parallel/test-stream-readable-object-multi-push-async.js
+++ b/test/parallel/test-stream-readable-object-multi-push-async.js
@@ -1,232 +1,206 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable;
+const { Readable } = require('../../lib/ours/index')
-var MAX = 42;
-var BATCH = 10;
+const MAX = 42
+const BATCH = 10
{
- var readable = new Readable({
+ const readable = new Readable({
objectMode: true,
read: common.mustCall(function () {
- var _this = this;
-
- console.log('>> READ');
- fetchData(function (err, data) {
+ silentConsole.log('>> READ')
+ fetchData((err, data) => {
if (err) {
- _this.destroy(err);
-
- return;
+ this.destroy(err)
+ return
}
if (data.length === 0) {
- console.log('pushing null');
-
- _this.push(null);
-
- return;
+ silentConsole.log('pushing null')
+ this.push(null)
+ return
}
- console.log('pushing');
- data.forEach(function (d) {
- return _this.push(d);
- });
- });
+ silentConsole.log('pushing')
+ data.forEach((d) => this.push(d))
+ })
}, Math.floor(MAX / BATCH) + 2)
- });
- var i = 0;
+ })
+ let i = 0
function fetchData(cb) {
if (i > MAX) {
- setTimeout(cb, 10, null, []);
+ setTimeout(cb, 10, null, [])
} else {
- var array = [];
- var max = i + BATCH;
+ const array = []
+ const max = i + BATCH
for (; i < max; i++) {
- array.push(i);
+ array.push(i)
}
- setTimeout(cb, 10, null, array);
+ setTimeout(cb, 10, null, array)
}
}
- readable.on('readable', function () {
- var data;
- console.log('readable emitted');
+ readable.on('readable', () => {
+ let data
+ silentConsole.log('readable emitted')
- while (data = readable.read()) {
- console.log(data);
+ while ((data = readable.read()) !== null) {
+ silentConsole.log(data)
}
- });
- readable.on('end', common.mustCall(function () {
- assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH);
- }));
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH)
+ })
+ )
}
{
- var _readable = new Readable({
+ const readable = new Readable({
objectMode: true,
read: common.mustCall(function () {
- var _this2 = this;
-
- console.log('>> READ');
- fetchData(function (err, data) {
+ silentConsole.log('>> READ')
+ fetchData((err, data) => {
if (err) {
- _this2.destroy(err);
-
- return;
+ this.destroy(err)
+ return
}
if (data.length === 0) {
- console.log('pushing null');
-
- _this2.push(null);
-
- return;
+ silentConsole.log('pushing null')
+ this.push(null)
+ return
}
- console.log('pushing');
- data.forEach(function (d) {
- return _this2.push(d);
- });
- });
+ silentConsole.log('pushing')
+ data.forEach((d) => this.push(d))
+ })
}, Math.floor(MAX / BATCH) + 2)
- });
-
- var _i = 0;
+ })
+ let i = 0
function fetchData(cb) {
- if (_i > MAX) {
- setTimeout(cb, 10, null, []);
+ if (i > MAX) {
+ setTimeout(cb, 10, null, [])
} else {
- var array = [];
- var max = _i + BATCH;
+ const array = []
+ const max = i + BATCH
- for (; _i < max; _i++) {
- array.push(_i);
+ for (; i < max; i++) {
+ array.push(i)
}
- setTimeout(cb, 10, null, array);
+ setTimeout(cb, 10, null, array)
}
}
- _readable.on('data', function (data) {
- console.log('data emitted', data);
- });
-
- _readable.on('end', common.mustCall(function () {
- assert.strictEqual(_i, (Math.floor(MAX / BATCH) + 1) * BATCH);
- }));
+ readable.on('data', (data) => {
+ silentConsole.log('data emitted', data)
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH)
+ })
+ )
}
{
- var _readable2 = new Readable({
+ const readable = new Readable({
objectMode: true,
read: common.mustCall(function () {
- var _this3 = this;
-
- console.log('>> READ');
- fetchData(function (err, data) {
+ silentConsole.log('>> READ')
+ fetchData((err, data) => {
if (err) {
- _this3.destroy(err);
-
- return;
+ this.destroy(err)
+ return
}
- console.log('pushing');
- data.forEach(function (d) {
- return _this3.push(d);
- });
+ silentConsole.log('pushing')
+ data.forEach((d) => this.push(d))
if (data[BATCH - 1] >= MAX) {
- console.log('pushing null');
-
- _this3.push(null);
+ silentConsole.log('pushing null')
+ this.push(null)
}
- });
+ })
}, Math.floor(MAX / BATCH) + 1)
- });
-
- var _i2 = 0;
+ })
+ let i = 0
function fetchData(cb) {
- var array = [];
- var max = _i2 + BATCH;
+ const array = []
+ const max = i + BATCH
- for (; _i2 < max; _i2++) {
- array.push(_i2);
+ for (; i < max; i++) {
+ array.push(i)
}
- setTimeout(cb, 10, null, array);
+ setTimeout(cb, 10, null, array)
}
- _readable2.on('data', function (data) {
- console.log('data emitted', data);
- });
-
- _readable2.on('end', common.mustCall(function () {
- assert.strictEqual(_i2, (Math.floor(MAX / BATCH) + 1) * BATCH);
- }));
+ readable.on('data', (data) => {
+ silentConsole.log('data emitted', data)
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH)
+ })
+ )
}
{
- var _readable3 = new Readable({
+ const readable = new Readable({
objectMode: true,
read: common.mustNotCall()
- });
-
- _readable3.on('data', common.mustNotCall());
-
- _readable3.push(null);
-
- var nextTickPassed = false;
- process.nextTick(function () {
- nextTickPassed = true;
- });
-
- _readable3.on('end', common.mustCall(function () {
- assert.strictEqual(nextTickPassed, true);
- }));
+ })
+ readable.on('data', common.mustNotCall())
+ readable.push(null)
+ let nextTickPassed = false
+ process.nextTick(() => {
+ nextTickPassed = true
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(nextTickPassed, true)
+ })
+ )
}
{
- var _readable4 = new Readable({
+ const readable = new Readable({
objectMode: true,
read: common.mustCall()
- });
-
- _readable4.on('data', function (data) {
- console.log('data emitted', data);
- });
-
- _readable4.on('end', common.mustCall());
-
- setImmediate(function () {
- _readable4.push('aaa');
-
- _readable4.push(null);
- });
+ })
+ readable.on('data', (data) => {
+ silentConsole.log('data emitted', data)
+ })
+ readable.on('end', common.mustCall())
+ setImmediate(() => {
+ readable.push('aaa')
+ readable.push(null)
+ })
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
+/* replacement start */
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-pause-and-resume.js b/test/parallel/test-stream-readable-pause-and-resume.js
index fe852cf426..b745e7a4d7 100644
--- a/test/parallel/test-stream-readable-pause-and-resume.js
+++ b/test/parallel/test-stream-readable-pause-and-resume.js
@@ -1,61 +1,88 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var _require = require('../../'),
- Readable = _require.Readable;
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
-var common = require('../common');
+const { Readable } = require('../../lib/ours/index')
-var ticks = 18;
-var expectedData = 19;
-var rs = new Readable({
+let ticks = 18
+let expectedData = 19
+const rs = new Readable({
objectMode: true,
- read: function read() {
- if (ticks-- > 0) return process.nextTick(function () {
- return rs.push({});
- });
- rs.push({});
- rs.push(null);
+ read: () => {
+ if (ticks-- > 0) return process.nextTick(() => rs.push({}))
+ rs.push({})
+ rs.push(null)
}
-});
-rs.on('end', common.mustCall());
-readAndPause();
+})
+rs.on('end', common.mustCall())
+readAndPause()
function readAndPause() {
// Does a on(data) -> pause -> wait -> resume -> on(data) ... loop.
// Expects on(data) to never fire if the stream is paused.
- var ondata = common.mustCall(function (data) {
- rs.pause();
- expectedData--;
- if (expectedData <= 0) return;
+ const ondata = common.mustCall((data) => {
+ rs.pause()
+ expectedData--
+ if (expectedData <= 0) return
setImmediate(function () {
- rs.removeListener('data', ondata);
- readAndPause();
- rs.resume();
- });
- }, 1); // only call ondata once
+ rs.removeListener('data', ondata)
+ readAndPause()
+ rs.resume()
+ })
+ }, 1) // Only call ondata once
- rs.on('data', ondata);
+ rs.on('data', ondata)
}
-;
+{
+ const readable = new Readable({
+ read() {}
+ })
-(function () {
- var t = require('tap');
+ function read() {}
- t.pass('sync run');
-})();
+ readable.setEncoding('utf8')
+ readable.on('readable', read)
+ readable.removeListener('readable', read)
+ readable.pause()
+ process.nextTick(function () {
+ assert(readable.isPaused())
+ })
+}
+{
+ const { PassThrough } = require('../../lib/ours/index')
-var _list = process.listeners('uncaughtException');
+ const source3 = new PassThrough()
+ const target3 = new PassThrough()
+ const chunk = Buffer.allocUnsafe(1000)
-process.removeAllListeners('uncaughtException');
+ while (target3.write(chunk));
-_list.pop();
+ source3.pipe(target3)
+ target3.on(
+ 'drain',
+ common.mustCall(() => {
+ assert(!source3.isPaused())
+ })
+ )
+ target3.on('data', () => {})
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-readable-then-resume.js b/test/parallel/test-stream-readable-readable-then-resume.js
index 5764ab8ddd..c5d57d6c4a 100644
--- a/test/parallel/test-stream-readable-readable-then-resume.js
+++ b/test/parallel/test-stream-readable-readable-then-resume.js
@@ -1,53 +1,51 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
-var _require = require('../../'),
- Readable = _require.Readable; // This test verifies that a stream could be resumed after
+const assert = require('assert') // This test verifies that a stream could be resumed after
// removing the readable event in the same tick
+check(
+ new Readable({
+ objectMode: true,
+ highWaterMark: 1,
-check(new Readable({
- objectMode: true,
- highWaterMark: 1,
- read: function read() {
- if (!this.first) {
- this.push('hello');
- this.first = true;
- return;
- }
+ read() {
+ if (!this.first) {
+ this.push('hello')
+ this.first = true
+ return
+ }
- this.push(null);
- }
-}));
+ this.push(null)
+ }
+ })
+)
function check(s) {
- var readableListener = common.mustNotCall();
- s.on('readable', readableListener);
- s.on('end', common.mustCall());
- s.removeListener('readable', readableListener);
- s.resume();
+ const readableListener = common.mustNotCall()
+ s.on('readable', readableListener)
+ s.on('end', common.mustCall())
+ assert.strictEqual(s.removeListener, s.off)
+ s.removeListener('readable', readableListener)
+ s.resume()
}
+/* replacement start */
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-readable.js b/test/parallel/test-stream-readable-readable.js
new file mode 100644
index 0000000000..340a588ce2
--- /dev/null
+++ b/test/parallel/test-stream-readable-readable.js
@@ -0,0 +1,68 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable } = require('../../lib/ours/index')
+
+{
+ const r = new Readable({
+ read() {}
+ })
+ assert.strictEqual(r.readable, true)
+ r.destroy()
+ assert.strictEqual(r.readable, false)
+}
+{
+ const mustNotCall = common.mustNotCall()
+ const r = new Readable({
+ read() {}
+ })
+ assert.strictEqual(r.readable, true)
+ r.on('end', mustNotCall)
+ r.resume()
+ r.push(null)
+ assert.strictEqual(r.readable, true)
+ r.off('end', mustNotCall)
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(r.readable, false)
+ })
+ )
+}
+{
+ const r = new Readable({
+ read: common.mustCall(() => {
+ process.nextTick(() => {
+ r.destroy(new Error())
+ assert.strictEqual(r.readable, false)
+ })
+ })
+ })
+ r.resume()
+ r.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(r.readable, false)
+ })
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js
index a17b4c6cdf..e1d9f526b7 100644
--- a/test/parallel/test-stream-readable-reading-readingMore.js
+++ b/test/parallel/test-stream-readable-reading-readingMore.js
@@ -1,178 +1,167 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
{
- var readable = new Readable({
- read: function read(size) {}
- });
- var state = readable._readableState; // Starting off with false initially.
-
- assert.strictEqual(state.reading, false);
- assert.strictEqual(state.readingMore, false);
- readable.on('data', common.mustCall(function (data) {
- // while in a flowing state with a 'readable' listener
- // we should not be reading more
- if (readable.readableFlowing) assert.strictEqual(state.readingMore, true); // reading as long as we've not ended
-
- assert.strictEqual(state.reading, !state.ended);
- }, 2));
+ const readable = new Readable({
+ read(size) {}
+ })
+ const state = readable._readableState // Starting off with false initially.
+
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.readingMore, false)
+ readable.on(
+ 'data',
+ common.mustCall((data) => {
+ // While in a flowing state with a 'readable' listener
+ // we should not be reading more
+ if (readable.readableFlowing) assert.strictEqual(state.readingMore, true) // Reading as long as we've not ended
+
+ assert.strictEqual(state.reading, !state.ended)
+ }, 2)
+ )
function onStreamEnd() {
// End of stream; state.reading is false
// And so should be readingMore.
- assert.strictEqual(state.readingMore, false);
- assert.strictEqual(state.reading, false);
+ assert.strictEqual(state.readingMore, false)
+ assert.strictEqual(state.reading, false)
}
- var expectedReadingMore = [true, false];
- readable.on('readable', common.mustCall(function () {
- // there is only one readingMore scheduled from on('data'),
- // after which everything is governed by the .read() call
- assert.strictEqual(state.readingMore, expectedReadingMore.shift()); // if the stream has ended, we shouldn't be reading
+ const expectedReadingMore = [true, true, false]
+ readable.on(
+ 'readable',
+ common.mustCall(() => {
+ // There is only one readingMore scheduled from on('data'),
+ // after which everything is governed by the .read() call
+ assert.strictEqual(state.readingMore, expectedReadingMore.shift()) // If the stream has ended, we shouldn't be reading
- assert.strictEqual(state.ended, !state.reading);
- var data = readable.read();
- if (data === null) // reached end of stream
- process.nextTick(common.mustCall(onStreamEnd, 1));
- }, 2));
- readable.on('end', common.mustCall(onStreamEnd));
- readable.push('pushed');
- readable.read(6); // reading
+ assert.strictEqual(state.ended, !state.reading) // Consume all the data
- assert.strictEqual(state.reading, true);
- assert.strictEqual(state.readingMore, true); // add chunk to front
+ while (readable.read() !== null);
- readable.unshift('unshifted'); // end
+ if (expectedReadingMore.length === 0)
+ // Reached end of stream
+ process.nextTick(common.mustCall(onStreamEnd, 1))
+ }, 3)
+ )
+ readable.on('end', common.mustCall(onStreamEnd))
+ readable.push('pushed')
+ readable.read(6) // reading
- readable.push(null);
-}
-{
- var _readable = new Readable({
- read: function read(size) {}
- });
+ assert.strictEqual(state.reading, true)
+ assert.strictEqual(state.readingMore, true) // add chunk to front
- var _state = _readable._readableState; // Starting off with false initially.
+ readable.unshift('unshifted') // end
- assert.strictEqual(_state.reading, false);
- assert.strictEqual(_state.readingMore, false);
-
- _readable.on('data', common.mustCall(function (data) {
- // while in a flowing state without a 'readable' listener
- // we should be reading more
- if (_readable.readableFlowing) assert.strictEqual(_state.readingMore, true); // reading as long as we've not ended
-
- assert.strictEqual(_state.reading, !_state.ended);
- }, 2));
+ readable.push(null)
+}
+{
+ const readable = new Readable({
+ read(size) {}
+ })
+ const state = readable._readableState // Starting off with false initially.
+
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.readingMore, false)
+ readable.on(
+ 'data',
+ common.mustCall((data) => {
+ // While in a flowing state without a 'readable' listener
+ // we should be reading more
+ if (readable.readableFlowing) assert.strictEqual(state.readingMore, true) // Reading as long as we've not ended
+
+ assert.strictEqual(state.reading, !state.ended)
+ }, 2)
+ )
function onStreamEnd() {
// End of stream; state.reading is false
// And so should be readingMore.
- assert.strictEqual(_state.readingMore, false);
- assert.strictEqual(_state.reading, false);
+ assert.strictEqual(state.readingMore, false)
+ assert.strictEqual(state.reading, false)
}
- _readable.on('end', common.mustCall(onStreamEnd));
+ readable.on('end', common.mustCall(onStreamEnd))
+ readable.push('pushed') // Stop emitting 'data' events
- _readable.push('pushed'); // stop emitting 'data' events
+ assert.strictEqual(state.flowing, true)
+ readable.pause() // paused
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.flowing, false)
+ readable.resume()
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.flowing, true) // add chunk to front
- assert.strictEqual(_state.flowing, true);
+ readable.unshift('unshifted') // end
- _readable.pause(); // paused
-
-
- assert.strictEqual(_state.reading, false);
- assert.strictEqual(_state.flowing, false);
-
- _readable.resume();
-
- assert.strictEqual(_state.reading, false);
- assert.strictEqual(_state.flowing, true); // add chunk to front
-
- _readable.unshift('unshifted'); // end
-
-
- _readable.push(null);
+ readable.push(null)
}
{
- var _readable2 = new Readable({
- read: function read(size) {}
- });
-
- var _state2 = _readable2._readableState; // Starting off with false initially.
-
- assert.strictEqual(_state2.reading, false);
- assert.strictEqual(_state2.readingMore, false);
- var onReadable = common.mustNotCall;
-
- _readable2.on('readable', onReadable);
-
- _readable2.on('data', common.mustCall(function (data) {
- // reading as long as we've not ended
- assert.strictEqual(_state2.reading, !_state2.ended);
- }, 2));
-
- _readable2.removeListener('readable', onReadable);
+ const readable = new Readable({
+ read(size) {}
+ })
+ const state = readable._readableState // Starting off with false initially.
+
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.readingMore, false)
+ const onReadable = common.mustNotCall
+ readable.on('readable', onReadable)
+ readable.on(
+ 'data',
+ common.mustCall((data) => {
+ // Reading as long as we've not ended
+ assert.strictEqual(state.reading, !state.ended)
+ }, 2)
+ )
+ readable.removeListener('readable', onReadable)
function onStreamEnd() {
// End of stream; state.reading is false
// And so should be readingMore.
- assert.strictEqual(_state2.readingMore, false);
- assert.strictEqual(_state2.reading, false);
+ assert.strictEqual(state.readingMore, false)
+ assert.strictEqual(state.reading, false)
}
- _readable2.on('end', common.mustCall(onStreamEnd));
-
- _readable2.push('pushed'); // we are still not flowing, we will be resuming in the next tick
-
+ readable.on('end', common.mustCall(onStreamEnd))
+ readable.push('pushed') // We are still not flowing, we will be resuming in the next tick
- assert.strictEqual(_state2.flowing, false); // wait for nextTick, so the readableListener flag resets
+ assert.strictEqual(state.flowing, false) // Wait for nextTick, so the readableListener flag resets
process.nextTick(function () {
- _readable2.resume(); // stop emitting 'data' events
-
-
- assert.strictEqual(_state2.flowing, true);
-
- _readable2.pause(); // paused
-
+ readable.resume() // Stop emitting 'data' events
- assert.strictEqual(_state2.flowing, false);
+ assert.strictEqual(state.flowing, true)
+ readable.pause() // paused
- _readable2.resume();
+ assert.strictEqual(state.flowing, false)
+ readable.resume()
+ assert.strictEqual(state.flowing, true) // add chunk to front
- assert.strictEqual(_state2.flowing, true); // add chunk to front
+ readable.unshift('unshifted') // end
- _readable2.unshift('unshifted'); // end
-
-
- _readable2.push(null);
- });
+ readable.push(null)
+ })
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
+/* replacement start */
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js
index 93a4a54226..22bf32ea62 100644
--- a/test/parallel/test-stream-readable-resume-hwm.js
+++ b/test/parallel/test-stream-readable-resume-hwm.js
@@ -1,47 +1,38 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Readable = _require.Readable; // readable.resume() should not lead to a ._read() call being scheduled
+const { Readable } = require('../../lib/ours/index') // readable.resume() should not lead to a ._read() call being scheduled
// when we exceed the high water mark already.
-
-var readable = new Readable({
+const readable = new Readable({
read: common.mustNotCall(),
highWaterMark: 100
-}); // Fill up the internal buffer so that we definitely exceed the HWM:
+}) // Fill up the internal buffer so that we definitely exceed the HWM:
-for (var i = 0; i < 10; i++) {
- readable.push('a'.repeat(200));
-} // Call resume, and pause after one chunk.
+for (let i = 0; i < 10; i++) readable.push('a'.repeat(200)) // Call resume, and pause after one chunk.
// The .pause() is just so that we don’t empty the buffer fully, which would
// be a valid reason to call ._read().
-
-readable.resume();
-readable.once('data', common.mustCall(function () {
- return readable.pause();
-}));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+readable.resume()
+readable.once(
+ 'data',
+ common.mustCall(() => readable.pause())
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js
index 8aba21c386..a696ba6d5a 100644
--- a/test/parallel/test-stream-readable-resumeScheduled.js
+++ b/test/parallel/test-stream-readable-resumeScheduled.js
@@ -1,91 +1,87 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common'); // Testing Readable Stream resumeScheduled state
-
+ error() {}
+}
+const common = require('../common') // Testing Readable Stream resumeScheduled state
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable;
+const { Readable, Writable } = require('../../lib/ours/index')
{
// pipe() test case
- var r = new Readable({
- read: function read() {}
- });
- var w = new Writable(); // resumeScheduled should start = `false`.
-
- assert.strictEqual(r._readableState.resumeScheduled, false); // calling pipe() should change the state value = true.
-
- r.pipe(w);
- assert.strictEqual(r._readableState.resumeScheduled, true);
- process.nextTick(common.mustCall(function () {
- assert.strictEqual(r._readableState.resumeScheduled, false);
- }));
+ const r = new Readable({
+ read() {}
+ })
+ const w = new Writable() // resumeScheduled should start = `false`.
+
+ assert.strictEqual(r._readableState.resumeScheduled, false) // Calling pipe() should change the state value = true.
+
+ r.pipe(w)
+ assert.strictEqual(r._readableState.resumeScheduled, true)
+ process.nextTick(
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
}
{
// 'data' listener test case
- var _r = new Readable({
- read: function read() {}
- }); // resumeScheduled should start = `false`.
-
-
- assert.strictEqual(_r._readableState.resumeScheduled, false);
-
- _r.push(bufferShim.from([1, 2, 3])); // adding 'data' listener should change the state value
-
-
- _r.on('data', common.mustCall(function () {
- assert.strictEqual(_r._readableState.resumeScheduled, false);
- }));
-
- assert.strictEqual(_r._readableState.resumeScheduled, true);
- process.nextTick(common.mustCall(function () {
- assert.strictEqual(_r._readableState.resumeScheduled, false);
- }));
+ const r = new Readable({
+ read() {}
+ }) // resumeScheduled should start = `false`.
+
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ r.push(Buffer.from([1, 2, 3])) // Adding 'data' listener should change the state value
+
+ r.on(
+ 'data',
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+ assert.strictEqual(r._readableState.resumeScheduled, true)
+ process.nextTick(
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
}
{
// resume() test case
- var _r2 = new Readable({
- read: function read() {}
- }); // resumeScheduled should start = `false`.
-
-
- assert.strictEqual(_r2._readableState.resumeScheduled, false); // Calling resume() should change the state value.
-
- _r2.resume();
-
- assert.strictEqual(_r2._readableState.resumeScheduled, true);
-
- _r2.on('resume', common.mustCall(function () {
- // The state value should be `false` again
- assert.strictEqual(_r2._readableState.resumeScheduled, false);
- }));
-
- process.nextTick(common.mustCall(function () {
- assert.strictEqual(_r2._readableState.resumeScheduled, false);
- }));
+ const r = new Readable({
+ read() {}
+ }) // resumeScheduled should start = `false`.
+
+ assert.strictEqual(r._readableState.resumeScheduled, false) // Calling resume() should change the state value.
+
+ r.resume()
+ assert.strictEqual(r._readableState.resumeScheduled, true)
+ r.on(
+ 'resume',
+ common.mustCall(() => {
+ // The state value should be `false` again
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+ process.nextTick(
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js
index 31bd3263ce..11a0ed83e6 100644
--- a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js
+++ b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js
@@ -1,101 +1,73 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var _require = require('../../'),
- Readable = _require.Readable;
+const { Readable } = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
{
// Call .setEncoding() while there are bytes already in the buffer.
- var r = new Readable({
- read: function read() {}
- });
- r.push(bufferShim.from('a'));
- r.push(bufferShim.from('b'));
- r.setEncoding('utf8');
- var chunks = [];
- r.on('data', function (chunk) {
- return chunks.push(chunk);
- });
- process.nextTick(function () {
- assert.deepStrictEqual(chunks, ['ab']);
- });
+ const r = new Readable({
+ read() {}
+ })
+ r.push(Buffer.from('a'))
+ r.push(Buffer.from('b'))
+ r.setEncoding('utf8')
+ const chunks = []
+ r.on('data', (chunk) => chunks.push(chunk))
+ process.nextTick(() => {
+ assert.deepStrictEqual(chunks, ['ab'])
+ })
}
{
// Call .setEncoding() while the buffer contains a complete,
// but chunked character.
- var _r = new Readable({
- read: function read() {}
- });
-
- _r.push(bufferShim.from([0xf0]));
-
- _r.push(bufferShim.from([0x9f]));
-
- _r.push(bufferShim.from([0x8e]));
-
- _r.push(bufferShim.from([0x89]));
-
- _r.setEncoding('utf8');
-
- var _chunks = [];
-
- _r.on('data', function (chunk) {
- return _chunks.push(chunk);
- });
-
- process.nextTick(function () {
- assert.deepStrictEqual(_chunks, ['🎉']);
- });
+ const r = new Readable({
+ read() {}
+ })
+ r.push(Buffer.from([0xf0]))
+ r.push(Buffer.from([0x9f]))
+ r.push(Buffer.from([0x8e]))
+ r.push(Buffer.from([0x89]))
+ r.setEncoding('utf8')
+ const chunks = []
+ r.on('data', (chunk) => chunks.push(chunk))
+ process.nextTick(() => {
+ assert.deepStrictEqual(chunks, ['🎉'])
+ })
}
{
// Call .setEncoding() while the buffer contains an incomplete character,
// and finish the character later.
- var _r2 = new Readable({
- read: function read() {}
- });
-
- _r2.push(bufferShim.from([0xf0]));
-
- _r2.push(bufferShim.from([0x9f]));
-
- _r2.setEncoding('utf8');
-
- _r2.push(bufferShim.from([0x8e]));
-
- _r2.push(bufferShim.from([0x89]));
-
- var _chunks2 = [];
-
- _r2.on('data', function (chunk) {
- return _chunks2.push(chunk);
- });
-
- process.nextTick(function () {
- assert.deepStrictEqual(_chunks2, ['🎉']);
- });
+ const r = new Readable({
+ read() {}
+ })
+ r.push(Buffer.from([0xf0]))
+ r.push(Buffer.from([0x9f]))
+ r.setEncoding('utf8')
+ r.push(Buffer.from([0x8e]))
+ r.push(Buffer.from([0x89]))
+ const chunks = []
+ r.on('data', (chunk) => chunks.push(chunk))
+ process.nextTick(() => {
+ assert.deepStrictEqual(chunks, ['🎉'])
+ })
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-setEncoding-null.js b/test/parallel/test-stream-readable-setEncoding-null.js
index c4276b4fbb..0ee0e8ecfc 100644
--- a/test/parallel/test-stream-readable-setEncoding-null.js
+++ b/test/parallel/test-stream-readable-setEncoding-null.js
@@ -1,39 +1,33 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable;
+const { Readable } = require('../../lib/ours/index')
{
- var readable = new Readable({
+ const readable = new Readable({
encoding: 'hex'
- });
- assert.strictEqual(readable._readableState.encoding, 'hex');
- readable.setEncoding(null);
- assert.strictEqual(readable._readableState.encoding, 'utf8');
+ })
+ assert.strictEqual(readable._readableState.encoding, 'hex')
+ readable.setEncoding(null)
+ assert.strictEqual(readable._readableState.encoding, 'utf8')
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-unpipe-resume.js b/test/parallel/test-stream-readable-unpipe-resume.js
new file mode 100644
index 0000000000..deb18e5082
--- /dev/null
+++ b/test/parallel/test-stream-readable-unpipe-resume.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+const fs = require('fs')
+
+const readStream = fs.createReadStream(process.execPath)
+const transformStream = new stream.Transform({
+ transform: common.mustCall(() => {
+ readStream.unpipe()
+ readStream.resume()
+ })
+})
+readStream.on('end', common.mustCall())
+readStream.pipe(transformStream).resume()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-unshift.js b/test/parallel/test-stream-readable-unshift.js
new file mode 100644
index 0000000000..380706e0be
--- /dev/null
+++ b/test/parallel/test-stream-readable-unshift.js
@@ -0,0 +1,207 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable } = require('../../lib/ours/index')
+
+{
+ // Check that strings are saved as Buffer
+ const readable = new Readable({
+ read() {}
+ })
+ const string = 'abc'
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert(Buffer.isBuffer(chunk))
+ assert.strictEqual(chunk.toString('utf8'), string)
+ }, 1)
+ )
+ readable.unshift(string)
+}
+{
+ // Check that data goes at the beginning
+ const readable = new Readable({
+ read() {}
+ })
+ const unshift = 'front'
+ const push = 'back'
+ const expected = [unshift, push]
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk.toString('utf8'), expected.shift())
+ }, 2)
+ )
+ readable.push(push)
+ readable.unshift(unshift)
+}
+{
+ // Check that buffer is saved with correct encoding
+ const readable = new Readable({
+ read() {}
+ })
+ const encoding = 'base64'
+ const string = Buffer.from('abc').toString(encoding)
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk.toString(encoding), string)
+ }, 1)
+ )
+ readable.unshift(string, encoding)
+}
+{
+ const streamEncoding = 'base64'
+
+ function checkEncoding(readable) {
+ // chunk encodings
+ const encodings = ['utf8', 'binary', 'hex', 'base64']
+ const expected = []
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ const { encoding, string } = expected.pop()
+ assert.strictEqual(chunk.toString(encoding), string)
+ }, encodings.length)
+ )
+
+ for (const encoding of encodings) {
+ const string = 'abc' // If encoding is the same as the state.encoding the string is
+ // saved as is
+
+ const expect = encoding !== streamEncoding ? Buffer.from(string, encoding).toString(streamEncoding) : string
+ expected.push({
+ encoding,
+ string: expect
+ })
+ readable.unshift(string, encoding)
+ }
+ }
+
+ const r1 = new Readable({
+ read() {}
+ })
+ r1.setEncoding(streamEncoding)
+ checkEncoding(r1)
+ const r2 = new Readable({
+ read() {},
+
+ encoding: streamEncoding
+ })
+ checkEncoding(r2)
+}
+{
+ // Both .push & .unshift should have the same behaviour
+ // When setting an encoding, each chunk should be emitted with that encoding
+ const encoding = 'base64'
+
+ function checkEncoding(readable) {
+ const string = 'abc'
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, Buffer.from(string).toString(encoding))
+ }, 2)
+ )
+ readable.push(string)
+ readable.unshift(string)
+ }
+
+ const r1 = new Readable({
+ read() {}
+ })
+ r1.setEncoding(encoding)
+ checkEncoding(r1)
+ const r2 = new Readable({
+ read() {},
+
+ encoding
+ })
+ checkEncoding(r2)
+}
+{
+ // Check that ObjectMode works
+ const readable = new Readable({
+ objectMode: true,
+
+ read() {}
+ })
+ const chunks = ['a', 1, {}, []]
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, chunks.pop())
+ }, chunks.length)
+ )
+
+ for (const chunk of chunks) {
+ readable.unshift(chunk)
+ }
+}
+{
+ // Should not throw: https://github.com/nodejs/node/issues/27192
+ const highWaterMark = 50
+
+ class ArrayReader extends Readable {
+ constructor(opt) {
+ super({
+ highWaterMark
+ }) // The error happened only when pushing above hwm
+
+ this.buffer = new Array(highWaterMark * 2).fill(0).map(String)
+ }
+
+ _read(size) {
+ while (this.buffer.length) {
+ const chunk = this.buffer.shift()
+
+ if (!this.buffer.length) {
+ this.push(chunk)
+ this.push(null)
+ return true
+ }
+
+ if (!this.push(chunk)) return
+ }
+ }
+ }
+
+ function onRead() {
+ while (null !== stream.read()) {
+ // Remove the 'readable' listener before unshifting
+ stream.removeListener('readable', onRead)
+ stream.unshift('a')
+ stream.on('data', (chunk) => {
+ silentConsole.log(chunk.length)
+ })
+ break
+ }
+ }
+
+ const stream = new ArrayReader()
+ stream.once('readable', common.mustCall(onRead))
+ stream.on(
+ 'end',
+ common.mustCall(() => {})
+ )
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js
index 42b8c26134..588bfb2224 100644
--- a/test/parallel/test-stream-readable-with-unimplemented-_read.js
+++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js
@@ -1,36 +1,34 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var _require = require('../../'),
- Readable = _require.Readable;
-
-var readable = new Readable();
-readable.on('error', common.expectsError({
- code: 'ERR_METHOD_NOT_IMPLEMENTED',
- type: Error,
- message: 'The _read() method is not implemented'
-}));
-readable.read();
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const readable = new Readable()
+readable.read()
+readable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_METHOD_NOT_IMPLEMENTED',
+ name: 'Error',
+ message: 'The _read() method is not implemented'
+ })
+)
+readable.on('close', common.mustCall())
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readableListening-state.js b/test/parallel/test-stream-readableListening-state.js
index cf6c46b759..5c37e4b148 100644
--- a/test/parallel/test-stream-readableListening-state.js
+++ b/test/parallel/test-stream-readableListening-state.js
@@ -1,51 +1,52 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var stream = require('../../');
-
-var r = new stream.Readable({
- read: function read() {}
-}); // readableListening state should start in `false`.
-
-assert.strictEqual(r._readableState.readableListening, false);
-r.on('readable', common.mustCall(function () {
- // Inside the readable event this state should be true.
- assert.strictEqual(r._readableState.readableListening, true);
-}));
-r.push(bufferShim.from('Testing readableListening state'));
-var r2 = new stream.Readable({
- read: function read() {}
-}); // readableListening state should start in `false`.
-
-assert.strictEqual(r2._readableState.readableListening, false);
-r2.on('data', common.mustCall(function (chunk) {
- // readableListening should be false because we don't have
- // a `readable` listener
- assert.strictEqual(r2._readableState.readableListening, false);
-}));
-r2.push(bufferShim.from('Testing readableListening state'));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+const r = new stream.Readable({
+ read: () => {}
+}) // readableListening state should start in `false`.
+
+assert.strictEqual(r._readableState.readableListening, false)
+r.on(
+ 'readable',
+ common.mustCall(() => {
+ // Inside the readable event this state should be true.
+ assert.strictEqual(r._readableState.readableListening, true)
+ })
+)
+r.push(Buffer.from('Testing readableListening state'))
+const r2 = new stream.Readable({
+ read: () => {}
+}) // readableListening state should start in `false`.
+
+assert.strictEqual(r2._readableState.readableListening, false)
+r2.on(
+ 'data',
+ common.mustCall((chunk) => {
+ // readableListening should be false because we don't have
+ // a `readable` listener
+ assert.strictEqual(r2._readableState.readableListening, false)
+ })
+)
+r2.push(Buffer.from('Testing readableListening state'))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js
new file mode 100644
index 0000000000..4c4f600041
--- /dev/null
+++ b/test/parallel/test-stream-reduce.js
@@ -0,0 +1,215 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+function sum(p, c) {
+ return p + c
+}
+
+{
+ // Does the same thing as `(await stream.toArray()).reduce(...)`
+ ;(async () => {
+ const tests = [
+ [[], sum, 0],
+ [[1], sum, 0],
+ [[1, 2, 3, 4, 5], sum, 0],
+ [[...Array(100).keys()], sum, 0],
+ [['a', 'b', 'c'], sum, ''],
+ [[1, 2], sum],
+ [[1, 2, 3], (x, y) => y]
+ ]
+
+ for (const [values, fn, initial] of tests) {
+ const streamReduce = await Readable.from(values).reduce(fn, initial)
+ const arrayReduce = values.reduce(fn, initial)
+ assert.deepStrictEqual(streamReduce, arrayReduce)
+ } // Does the same thing as `(await stream.toArray()).reduce(...)` with an
+ // asynchronous reducer
+
+ for (const [values, fn, initial] of tests) {
+ const streamReduce = await Readable.from(values)
+ .map(async (x) => x)
+ .reduce(fn, initial)
+ const arrayReduce = values.reduce(fn, initial)
+ assert.deepStrictEqual(streamReduce, arrayReduce)
+ }
+ })().then(common.mustCall())
+}
+{
+ // Works with an async reducer, with or without initial value
+ ;(async () => {
+ const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0)
+ assert.strictEqual(six, 6)
+ })().then(common.mustCall())
+ ;(async () => {
+ const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c)
+ assert.strictEqual(six, 6)
+ })().then(common.mustCall())
+}
+{
+ // Works lazily
+ assert
+ .rejects(
+ Readable.from([1, 2, 3, 4, 5, 6])
+ .map(
+ common.mustCall((x) => {
+ return x
+ }, 3)
+ ) // Two consumed and one buffered by `map` due to default concurrency
+ .reduce(async (p, c) => {
+ if (p === 1) {
+ throw new Error('boom')
+ }
+
+ return c
+ }, 0),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Support for AbortSignal
+ const ac = new AbortController()
+ assert
+ .rejects(
+ async () => {
+ await Readable.from([1, 2, 3]).reduce(
+ async (p, c) => {
+ if (c === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+
+ return Promise.resolve()
+ },
+ 0,
+ {
+ signal: ac.signal
+ }
+ )
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ ac.abort()
+}
+{
+ // Support for AbortSignal - pre aborted
+ const stream = Readable.from([1, 2, 3])
+ assert
+ .rejects(
+ async () => {
+ await stream.reduce(
+ async (p, c) => {
+ if (c === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+
+ return Promise.resolve()
+ },
+ 0,
+ {
+ signal: AbortSignal.abort()
+ }
+ )
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(
+ common.mustCall(() => {
+ assert.strictEqual(stream.destroyed, true)
+ })
+ )
+}
+{
+ // Support for AbortSignal - deep
+ const stream = Readable.from([1, 2, 3])
+ assert
+ .rejects(
+ async () => {
+ await stream.reduce(
+ async (p, c, { signal }) => {
+ signal.addEventListener('abort', common.mustCall(), {
+ once: true
+ })
+
+ if (c === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+
+ return Promise.resolve()
+ },
+ 0,
+ {
+ signal: AbortSignal.abort()
+ }
+ )
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(
+ common.mustCall(() => {
+ assert.strictEqual(stream.destroyed, true)
+ })
+ )
+}
+{
+ // Error cases
+ assert.rejects(() => Readable.from([]).reduce(1), /TypeError/)
+ assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/)
+ assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, 1), /ERR_INVALID_ARG_TYPE/)
+ assert.rejects(
+ () =>
+ Readable.from([]).reduce((x, y) => x + y, 0, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+}
+{
+ // Test result is a Promise
+ const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0)
+ assert.ok(result instanceof Promise)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-some-find-every.mjs b/test/parallel/test-stream-some-find-every.mjs
new file mode 100644
index 0000000000..34c8e2a8a2
--- /dev/null
+++ b/test/parallel/test-stream-some-find-every.mjs
@@ -0,0 +1,183 @@
+import * as common from '../common/index.mjs';
+import { setTimeout } from 'timers/promises';
+import { Readable }from '../../lib/ours/index.js';
+import assert from 'assert';
+import tap from 'tap';
+
+
+function oneTo5() {
+ return Readable.from([1, 2, 3, 4, 5]);
+}
+
+function oneTo5Async() {
+ return oneTo5().map(async (x) => {
+ await Promise.resolve();
+ return x;
+ });
+}
+{
+ // Some, find, and every work with a synchronous stream and predicate
+ assert.strictEqual(await oneTo5().some((x) => x > 3), true);
+ assert.strictEqual(await oneTo5().every((x) => x > 3), false);
+ assert.strictEqual(await oneTo5().find((x) => x > 3), 4);
+ assert.strictEqual(await oneTo5().some((x) => x > 6), false);
+ assert.strictEqual(await oneTo5().every((x) => x < 6), true);
+ assert.strictEqual(await oneTo5().find((x) => x > 6), undefined);
+ assert.strictEqual(await Readable.from([]).some(() => true), false);
+ assert.strictEqual(await Readable.from([]).every(() => true), true);
+ assert.strictEqual(await Readable.from([]).find(() => true), undefined);
+}
+
+{
+ // Some, find, and every work with an asynchronous stream and synchronous predicate
+ assert.strictEqual(await oneTo5Async().some((x) => x > 3), true);
+ assert.strictEqual(await oneTo5Async().every((x) => x > 3), false);
+ assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4);
+ assert.strictEqual(await oneTo5Async().some((x) => x > 6), false);
+ assert.strictEqual(await oneTo5Async().every((x) => x < 6), true);
+ assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined);
+}
+
+{
+ // Some, find, and every work on synchronous streams with an asynchronous predicate
+ assert.strictEqual(await oneTo5().some(async (x) => x > 3), true);
+ assert.strictEqual(await oneTo5().every(async (x) => x > 3), false);
+ assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4);
+ assert.strictEqual(await oneTo5().some(async (x) => x > 6), false);
+ assert.strictEqual(await oneTo5().every(async (x) => x < 6), true);
+ assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined);
+}
+
+{
+ // Some, find, and every work on asynchronous streams with an asynchronous predicate
+ assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true);
+ assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false);
+ assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4);
+ assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false);
+ assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true);
+ assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined);
+}
+
+{
+ async function checkDestroyed(stream) {
+ await setTimeout();
+ assert.strictEqual(stream.destroyed, true);
+ }
+
+ {
+ // Some, find, and every short circuit
+ const someStream = oneTo5();
+ await someStream.some(common.mustCall((x) => x > 2, 3));
+ await checkDestroyed(someStream);
+
+ const everyStream = oneTo5();
+ await everyStream.every(common.mustCall((x) => x < 3, 3));
+ await checkDestroyed(everyStream);
+
+ const findStream = oneTo5();
+ await findStream.find(common.mustCall((x) => x > 1, 2));
+ await checkDestroyed(findStream);
+
+ // When short circuit isn't possible the whole stream is iterated
+ await oneTo5().some(common.mustCall(() => false, 5));
+ await oneTo5().every(common.mustCall(() => true, 5));
+ await oneTo5().find(common.mustCall(() => false, 5));
+ }
+
+ {
+ // Some, find, and every short circuit async stream/predicate
+ const someStream = oneTo5Async();
+ await someStream.some(common.mustCall(async (x) => x > 2, 3));
+ await checkDestroyed(someStream);
+
+ const everyStream = oneTo5Async();
+ await everyStream.every(common.mustCall(async (x) => x < 3, 3));
+ await checkDestroyed(everyStream);
+
+ const findStream = oneTo5Async();
+ await findStream.find(common.mustCall(async (x) => x > 1, 2));
+ await checkDestroyed(findStream);
+
+ // When short circuit isn't possible the whole stream is iterated
+ await oneTo5Async().some(common.mustCall(async () => false, 5));
+ await oneTo5Async().every(common.mustCall(async () => true, 5));
+ await oneTo5Async().find(common.mustCall(async () => false, 5));
+ }
+}
+
+{
+ // Concurrency doesn't affect which value is found.
+ const found = await Readable.from([1, 2]).find(async (val) => {
+ if (val === 1) {
+ await setTimeout(100);
+ }
+ return true;
+ }, { concurrency: 2 });
+ assert.strictEqual(found, 1);
+}
+
+{
+ // Support for AbortSignal
+ for (const op of ['some', 'every', 'find']) {
+ {
+ const ac = new AbortController();
+ assert.rejects(Readable.from([1, 2, 3])[op](
+ () => new Promise(() => { }),
+ { signal: ac.signal }
+ ), {
+ name: 'AbortError',
+ }, `${op} should abort correctly with sync abort`).then(common.mustCall());
+ ac.abort();
+ }
+ {
+ // Support for pre-aborted AbortSignal
+ assert.rejects(Readable.from([1, 2, 3])[op](
+ () => new Promise(() => { }),
+ { signal: AbortSignal.abort() }
+ ), {
+ name: 'AbortError',
+ }, `${op} should abort with pre-aborted abort controller`).then(common.mustCall());
+ }
+ }
+}
+{
+ // Error cases
+ for (const op of ['some', 'every', 'find']) {
+ assert.rejects(async () => {
+ await Readable.from([1])[op](1);
+ }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid function`).then(common.mustCall());
+ assert.rejects(async () => {
+ await Readable.from([1])[op]((x) => x, {
+ concurrency: 'Foo'
+ });
+ }, /ERR_OUT_OF_RANGE/, `${op} should throw for invalid concurrency`).then(common.mustCall());
+ assert.rejects(async () => {
+ await Readable.from([1])[op]((x) => x, 1);
+ }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid concurrency`).then(common.mustCall());
+ assert.rejects(async () => {
+ await Readable.from([1])[op]((x) => x, {
+ signal: true
+ });
+ }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid signal`).then(common.mustCall());
+ }
+}
+{
+ for (const op of ['some', 'every', 'find']) {
+ const stream = oneTo5();
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall(() => {}),
+ });
+ // Check that map isn't getting called.
+ stream[op](() => {});
+ }
+}
+
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ if(code === 0) {
+ tap.pass('test succeeded');
+ } else {
+ tap.fail(`test failed - exited code ${code}`);
+ }
+ });
+ /* replacement end */
diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js
new file mode 100644
index 0000000000..8f3e9b6e76
--- /dev/null
+++ b/test/parallel/test-stream-toArray.js
@@ -0,0 +1,142 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+{
+ // Works on a synchronous stream
+ ;(async () => {
+ const tests = [
+ [],
+ [1],
+ [1, 2, 3],
+ Array(100)
+ .fill()
+ .map((_, i) => i)
+ ]
+
+ for (const test of tests) {
+ const stream = Readable.from(test)
+ const result = await stream.toArray()
+ assert.deepStrictEqual(result, test)
+ }
+ })().then(common.mustCall())
+}
+{
+ // Works on a non-object-mode stream
+ ;(async () => {
+ const firstBuffer = Buffer.from([1, 2, 3])
+ const secondBuffer = Buffer.from([4, 5, 6])
+ const stream = Readable.from([firstBuffer, secondBuffer], {
+ objectMode: false
+ })
+ const result = await stream.toArray()
+ assert.strictEqual(Array.isArray(result), true)
+ assert.deepStrictEqual(result, [firstBuffer, secondBuffer])
+ })().then(common.mustCall())
+}
+{
+ // Works on an asynchronous stream
+ ;(async () => {
+ const tests = [
+ [],
+ [1],
+ [1, 2, 3],
+ Array(100)
+ .fill()
+ .map((_, i) => i)
+ ]
+
+ for (const test of tests) {
+ const stream = Readable.from(test).map((x) => Promise.resolve(x))
+ const result = await stream.toArray()
+ assert.deepStrictEqual(result, test)
+ }
+ })().then(common.mustCall())
+}
+{
+ // Support for AbortSignal
+ const ac = new AbortController()
+ let stream
+ assert
+ .rejects(
+ async () => {
+ stream = Readable.from([1, 2, 3]).map(async (x) => {
+ if (x === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+
+ return Promise.resolve(x)
+ })
+ await stream.toArray({
+ signal: ac.signal
+ })
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(
+ common.mustCall(() => {
+ // Only stops toArray, does not destroy the stream
+ assert(stream.destroyed, false)
+ })
+ )
+ ac.abort()
+}
+{
+ // Test result is a Promise
+ const result = Readable.from([1, 2, 3, 4, 5]).toArray()
+ assert.strictEqual(result instanceof Promise, true)
+}
+{
+ // Error cases
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).toArray(1)
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).toArray({
+ signal: true
+ })
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-callback-twice.js b/test/parallel/test-stream-transform-callback-twice.js
index 005d5bf679..0ab6afd278 100644
--- a/test/parallel/test-stream-transform-callback-twice.js
+++ b/test/parallel/test-stream-transform-callback-twice.js
@@ -1,41 +1,38 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Transform = _require.Transform;
+const { Transform } = require('../../lib/ours/index')
-var stream = new Transform({
- transform: function transform(chunk, enc, cb) {
- cb();
- cb();
+const stream = new Transform({
+ transform(chunk, enc, cb) {
+ cb()
+ cb()
}
-});
-stream.on('error', common.expectsError({
- type: Error,
- message: 'Callback called multiple times',
- code: 'ERR_MULTIPLE_CALLBACK'
-}));
-stream.write('foo');
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+stream.on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'Callback called multiple times',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+)
+stream.write('foo')
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js
index 2b57d43b4f..0404839fed 100644
--- a/test/parallel/test-stream-transform-constructor-set-methods.js
+++ b/test/parallel/test-stream-transform-constructor-set-methods.js
@@ -1,62 +1,59 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('assert/'),
- strictEqual = _require.strictEqual;
+const assert = require('assert')
-var _require2 = require('../../'),
- Transform = _require2.Transform;
+const { Transform } = require('../../lib/ours/index')
-var t = new Transform();
-t.on('error', common.expectsError({
- type: Error,
- code: 'ERR_METHOD_NOT_IMPLEMENTED',
- message: 'The _transform() method is not implemented'
-}));
-t.end(bufferShim.from('blerg'));
+const t = new Transform()
+assert.throws(
+ () => {
+ t.end(Buffer.from('blerg'))
+ },
+ {
+ name: 'Error',
+ code: 'ERR_METHOD_NOT_IMPLEMENTED',
+ message: 'The _transform() method is not implemented'
+ }
+)
-var _transform = common.mustCall(function (chunk, _, next) {
- next();
-});
+const _transform = common.mustCall((chunk, _, next) => {
+ next()
+})
-var _final = common.mustCall(function (next) {
- next();
-});
+const _final = common.mustCall((next) => {
+ next()
+})
-var _flush = common.mustCall(function (next) {
- next();
-});
+const _flush = common.mustCall((next) => {
+ next()
+})
-var t2 = new Transform({
+const t2 = new Transform({
transform: _transform,
flush: _flush,
final: _final
-});
-strictEqual(t2._transform, _transform);
-strictEqual(t2._flush, _flush);
-strictEqual(t2._final, _final);
-t2.end(bufferShim.from('blerg'));
-t2.resume();
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+assert.strictEqual(t2._transform, _transform)
+assert.strictEqual(t2._flush, _flush)
+assert.strictEqual(t2._final, _final)
+t2.end(Buffer.from('blerg'))
+t2.resume()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-destroy.js b/test/parallel/test-stream-transform-destroy.js
index 3a6f1dda9f..895e6a1eeb 100644
--- a/test/parallel/test-stream-transform-destroy.js
+++ b/test/parallel/test-stream-transform-destroy.js
@@ -1,181 +1,142 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Transform = _require.Transform;
+const { Transform } = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
{
- var transform = new Transform({
- transform: function transform(chunk, enc, cb) {}
- });
- transform.resume();
- transform.on('end', common.mustNotCall());
- transform.on('close', common.mustCall());
- transform.on('finish', common.mustNotCall());
- transform.destroy();
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform.resume()
+ transform.on('end', common.mustNotCall())
+ transform.on('close', common.mustCall())
+ transform.on('finish', common.mustNotCall())
+ transform.destroy()
}
{
- var _transform = new Transform({
- transform: function transform(chunk, enc, cb) {}
- });
-
- _transform.resume();
-
- var expected = new Error('kaboom');
-
- _transform.on('end', common.mustNotCall());
-
- _transform.on('finish', common.mustNotCall());
-
- _transform.on('close', common.mustCall());
-
- _transform.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, expected);
- }));
-
- _transform.destroy(expected);
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform.resume()
+ const expected = new Error('kaboom')
+ transform.on('end', common.mustNotCall())
+ transform.on('finish', common.mustNotCall())
+ transform.on('close', common.mustCall())
+ transform.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ transform.destroy(expected)
}
{
- var _transform2 = new Transform({
- transform: function transform(chunk, enc, cb) {}
- });
-
- _transform2._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected);
- cb(err);
- }, 1);
-
- var _expected = new Error('kaboom');
-
- _transform2.on('finish', common.mustNotCall('no finish event'));
-
- _transform2.on('close', common.mustCall());
-
- _transform2.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected);
- }));
-
- _transform2.destroy(_expected);
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ }, 1)
+ const expected = new Error('kaboom')
+ transform.on('finish', common.mustNotCall('no finish event'))
+ transform.on('close', common.mustCall())
+ transform.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ transform.destroy(expected)
}
{
- var _expected2 = new Error('kaboom');
+ const expected = new Error('kaboom')
+ const transform = new Transform({
+ transform(chunk, enc, cb) {},
- var _transform3 = new Transform({
- transform: function transform(chunk, enc, cb) {},
destroy: common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected2);
- cb();
+ assert.strictEqual(err, expected)
+ cb()
}, 1)
- });
-
- _transform3.resume();
-
- _transform3.on('end', common.mustNotCall('no end event'));
-
- _transform3.on('close', common.mustCall());
-
- _transform3.on('finish', common.mustNotCall('no finish event')); // error is swallowed by the custom _destroy
-
-
- _transform3.on('error', common.mustNotCall('no error event'));
-
- _transform3.destroy(_expected2);
+ })
+ transform.resume()
+ transform.on('end', common.mustNotCall('no end event'))
+ transform.on('close', common.mustCall())
+ transform.on('finish', common.mustNotCall('no finish event')) // Error is swallowed by the custom _destroy
+
+ transform.on('error', common.mustNotCall('no error event'))
+ transform.destroy(expected)
}
{
- var _transform4 = new Transform({
- transform: function transform(chunk, enc, cb) {}
- });
-
- _transform4._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb();
- }, 1);
-
- _transform4.destroy();
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ }, 1)
+ transform.destroy()
}
{
- var _transform5 = new Transform({
- transform: function transform(chunk, enc, cb) {}
- });
-
- _transform5.resume();
-
- _transform5._destroy = common.mustCall(function (err, cb) {
- var _this = this;
-
- assert.strictEqual(err, null);
- process.nextTick(function () {
- _this.push(null);
-
- _this.end();
-
- cb();
- });
- }, 1);
- var fail = common.mustNotCall('no event');
-
- _transform5.on('finish', fail);
-
- _transform5.on('end', fail);
-
- _transform5.on('close', common.mustCall());
-
- _transform5.destroy();
-
- _transform5.removeListener('end', fail);
-
- _transform5.removeListener('finish', fail);
-
- _transform5.on('end', common.mustCall());
-
- _transform5.on('finish', common.mustCall());
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform.resume()
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.push(null)
+ this.end()
+ cb()
+ })
+ }, 1)
+ const fail = common.mustNotCall('no event')
+ transform.on('finish', fail)
+ transform.on('end', fail)
+ transform.on('close', common.mustCall())
+ transform.destroy()
+ transform.removeListener('end', fail)
+ transform.removeListener('finish', fail)
+ transform.on('end', common.mustCall())
+ transform.on('finish', common.mustNotCall())
}
{
- var _transform6 = new Transform({
- transform: function transform(chunk, enc, cb) {}
- });
-
- var _expected3 = new Error('kaboom');
-
- _transform6._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb(_expected3);
- }, 1);
-
- _transform6.on('close', common.mustCall());
-
- _transform6.on('finish', common.mustNotCall('no finish event'));
-
- _transform6.on('end', common.mustNotCall('no end event'));
-
- _transform6.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected3);
- }));
-
- _transform6.destroy();
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ const expected = new Error('kaboom')
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ }, 1)
+ transform.on('close', common.mustCall())
+ transform.on('finish', common.mustNotCall('no finish event'))
+ transform.on('end', common.mustNotCall('no end event'))
+ transform.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ transform.destroy()
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-final-sync.js b/test/parallel/test-stream-transform-final-sync.js
index 39c0b46a98..7999d44cab 100644
--- a/test/parallel/test-stream-transform-final-sync.js
+++ b/test/parallel/test-stream-transform-final-sync.js
@@ -1,134 +1,139 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var stream = require('../../');
-
-var state = 0;
-/*
-What you do
-var stream = new stream.Transform({
- transform: function transformCallback(chunk, _, next) {
- // part 1
- this.push(chunk);
- //part 2
- next();
- },
- final: function endCallback(done) {
- // part 1
- process.nextTick(function () {
- // part 2
- done();
- });
- },
- flush: function flushCallback(done) {
- // part 1
- process.nextTick(function () {
- // part 2
- done();
- });
- }
-});
-t.on('data', dataListener);
-t.on('end', endListener);
-t.on('finish', finishListener);
-t.write(1);
-t.write(4);
-t.end(7, endMethodCallback);
-
-The order things are called
-
-1. transformCallback part 1
-2. dataListener
-3. transformCallback part 2
-4. transformCallback part 1
-5. dataListener
-6. transformCallback part 2
-7. transformCallback part 1
-8. dataListener
-9. transformCallback part 2
-10. finalCallback part 1
-11. finalCallback part 2
-12. flushCallback part 1
-13. finishListener
-14. endMethodCallback
-15. flushCallback part 2
-16. endListener
-*/
-
-var t = new stream.Transform({
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+let state = 0 // What you do
+//
+// const stream = new stream.Transform({
+// transform: function transformCallback(chunk, _, next) {
+// // part 1
+// this.push(chunk);
+// //part 2
+// next();
+// },
+// final: function endCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// },
+// flush: function flushCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// }
+// });
+// t.on('data', dataListener);
+// t.on('end', endListener);
+// t.on('finish', finishListener);
+// t.write(1);
+// t.write(4);
+// t.end(7, endMethodCallback);
+//
+// The order things are called
+//
+// 1. transformCallback part 1
+// 2. dataListener
+// 3. transformCallback part 2
+// 4. transformCallback part 1
+// 5. dataListener
+// 6. transformCallback part 2
+// 7. transformCallback part 1
+// 8. dataListener
+// 9. transformCallback part 2
+// 10. finalCallback part 1
+// 11. finalCallback part 2
+// 12. flushCallback part 1
+// 13. finishListener
+// 14. endMethodCallback
+// 15. flushCallback part 2
+// 16. endListener
+
+const t = new stream.Transform({
objectMode: true,
transform: common.mustCall(function (chunk, _, next) {
// transformCallback part 1
- assert.strictEqual(++state, chunk);
- this.push(state); // transformCallback part 2
+ assert.strictEqual(++state, chunk)
+ this.push(state) // transformCallback part 2
- assert.strictEqual(++state, chunk + 2);
- process.nextTick(next);
+ assert.strictEqual(++state, chunk + 2)
+ process.nextTick(next)
}, 3),
final: common.mustCall(function (done) {
- state++; // finalCallback part 1
+ state++ // finalCallback part 1
- assert.strictEqual(state, 10);
- state++; // finalCallback part 2
+ assert.strictEqual(state, 10)
+ state++ // finalCallback part 2
- assert.strictEqual(state, 11);
- done();
+ assert.strictEqual(state, 11)
+ done()
}, 1),
flush: common.mustCall(function (done) {
- state++; // fluchCallback part 1
+    state++ // flushCallback part 1
- assert.strictEqual(state, 12);
+ assert.strictEqual(state, 12)
process.nextTick(function () {
- state++; // fluchCallback part 2
+      state++ // flushCallback part 2
- assert.strictEqual(state, 15);
- done();
- });
+ assert.strictEqual(state, 13)
+ done()
+ })
}, 1)
-});
-t.on('finish', common.mustCall(function () {
- state++; // finishListener
-
- assert.strictEqual(state, 13);
-}, 1));
-t.on('end', common.mustCall(function () {
- state++; // endEvent
+})
+t.on(
+ 'finish',
+ common.mustCall(function () {
+ state++ // finishListener
- assert.strictEqual(state, 16);
-}, 1));
-t.on('data', common.mustCall(function (d) {
- // dataListener
- assert.strictEqual(++state, d + 1);
-}, 3));
-t.write(1);
-t.write(4);
-t.end(7, common.mustCall(function () {
- state++; // endMethodCallback
-
- assert.strictEqual(state, 14);
-}, 1));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+ assert.strictEqual(state, 15)
+ }, 1)
+)
+t.on(
+ 'end',
+ common.mustCall(function () {
+ state++ // endEvent
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(state, 16)
+ }, 1)
+)
+t.on(
+ 'data',
+ common.mustCall(function (d) {
+ // dataListener
+ assert.strictEqual(++state, d + 1)
+ }, 3)
+)
+t.write(1)
+t.write(4)
+t.end(
+ 7,
+ common.mustCall(function () {
+ state++ // endMethodCallback
+
+ assert.strictEqual(state, 14)
+ }, 1)
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js
index 37b270ca8f..0014844a5b 100644
--- a/test/parallel/test-stream-transform-final.js
+++ b/test/parallel/test-stream-transform-final.js
@@ -1,136 +1,140 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var stream = require('../../');
-
-var state = 0;
-/*
-What you do
-var stream = new stream.Transform({
- transform: function transformCallback(chunk, _, next) {
- // part 1
- this.push(chunk);
- //part 2
- next();
- },
- final: function endCallback(done) {
- // part 1
- process.nextTick(function () {
- // part 2
- done();
- });
- },
- flush: function flushCallback(done) {
- // part 1
- process.nextTick(function () {
- // part 2
- done();
- });
- }
-});
-t.on('data', dataListener);
-t.on('end', endListener);
-t.on('finish', finishListener);
-t.write(1);
-t.write(4);
-t.end(7, endMethodCallback);
-
-The order things are called
-
-1. transformCallback part 1
-2. dataListener
-3. transformCallback part 2
-4. transformCallback part 1
-5. dataListener
-6. transformCallback part 2
-7. transformCallback part 1
-8. dataListener
-9. transformCallback part 2
-10. finalCallback part 1
-11. finalCallback part 2
-12. flushCallback part 1
-13. finishListener
-14. endMethodCallback
-15. flushCallback part 2
-16. endListener
-*/
-
-var t = new stream.Transform({
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+let state = 0 // What you do:
+//
+// const stream = new stream.Transform({
+// transform: function transformCallback(chunk, _, next) {
+// // part 1
+// this.push(chunk);
+// //part 2
+// next();
+// },
+// final: function endCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// },
+// flush: function flushCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// }
+// });
+// t.on('data', dataListener);
+// t.on('end', endListener);
+// t.on('finish', finishListener);
+// t.write(1);
+// t.write(4);
+// t.end(7, endMethodCallback);
+//
+// The order things are called
+// 1. transformCallback part 1
+// 2. dataListener
+// 3. transformCallback part 2
+// 4. transformCallback part 1
+// 5. dataListener
+// 6. transformCallback part 2
+// 7. transformCallback part 1
+// 8. dataListener
+// 9. transformCallback part 2
+// 10. finalCallback part 1
+// 11. finalCallback part 2
+// 12. flushCallback part 1
+// 13. finishListener
+// 14. endMethodCallback
+// 15. flushCallback part 2
+// 16. endListener
+
+const t = new stream.Transform({
objectMode: true,
transform: common.mustCall(function (chunk, _, next) {
// transformCallback part 1
- assert.strictEqual(++state, chunk);
- this.push(state); // transformCallback part 2
+ assert.strictEqual(++state, chunk)
+ this.push(state) // transformCallback part 2
- assert.strictEqual(++state, chunk + 2);
- process.nextTick(next);
+ assert.strictEqual(++state, chunk + 2)
+ process.nextTick(next)
}, 3),
final: common.mustCall(function (done) {
- state++; // finalCallback part 1
+ state++ // finalCallback part 1
- assert.strictEqual(state, 10);
+ assert.strictEqual(state, 10)
setTimeout(function () {
- state++; // finalCallback part 2
+ state++ // finalCallback part 2
- assert.strictEqual(state, 11);
- done();
- }, 100);
+ assert.strictEqual(state, 11)
+ done()
+ }, 100)
}, 1),
flush: common.mustCall(function (done) {
- state++; // flushCallback part 1
+ state++ // flushCallback part 1
- assert.strictEqual(state, 12);
+ assert.strictEqual(state, 12)
process.nextTick(function () {
- state++; // flushCallback part 2
+ state++ // flushCallback part 2
- assert.strictEqual(state, 15);
- done();
- });
+ assert.strictEqual(state, 13)
+ done()
+ })
}, 1)
-});
-t.on('finish', common.mustCall(function () {
- state++; // finishListener
-
- assert.strictEqual(state, 13);
-}, 1));
-t.on('end', common.mustCall(function () {
- state++; // end event
+})
+t.on(
+ 'finish',
+ common.mustCall(function () {
+ state++ // finishListener
- assert.strictEqual(state, 16);
-}, 1));
-t.on('data', common.mustCall(function (d) {
- // dataListener
- assert.strictEqual(++state, d + 1);
-}, 3));
-t.write(1);
-t.write(4);
-t.end(7, common.mustCall(function () {
- state++; // endMethodCallback
-
- assert.strictEqual(state, 14);
-}, 1));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+ assert.strictEqual(state, 15)
+ }, 1)
+)
+t.on(
+ 'end',
+ common.mustCall(function () {
+ state++ // end event
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(state, 16)
+ }, 1)
+)
+t.on(
+ 'data',
+ common.mustCall(function (d) {
+ // dataListener
+ assert.strictEqual(++state, d + 1)
+ }, 3)
+)
+t.write(1)
+t.write(4)
+t.end(
+ 7,
+ common.mustCall(function () {
+ state++ // endMethodCallback
+
+ assert.strictEqual(state, 14)
+ }, 1)
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js
index a6f9810315..7541048528 100644
--- a/test/parallel/test-stream-transform-flush-data.js
+++ b/test/parallel/test-stream-transform-flush-data.js
@@ -1,48 +1,43 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Transform = require('../../').Transform;
+const Transform = require('../../lib/ours/index').Transform
-var expected = 'asdf';
+const expected = 'asdf'
function _transform(d, e, n) {
- n();
+ n()
}
function _flush(n) {
- n(null, expected);
+ n(null, expected)
}
-var t = new Transform({
+const t = new Transform({
transform: _transform,
flush: _flush
-});
-t.end(bufferShim.from('blerg'));
-t.on('data', function (data) {
- assert.strictEqual(data.toString(), expected);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+t.end(Buffer.from('blerg'))
+t.on('data', (data) => {
+ assert.strictEqual(data.toString(), expected)
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js
index 39ec6c3c8f..e2ae60a7e6 100644
--- a/test/parallel/test-stream-transform-objectmode-falsey-value.js
+++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,58 +18,60 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var PassThrough = stream.PassThrough;
-var src = new PassThrough({
+const PassThrough = stream.PassThrough
+const src = new PassThrough({
objectMode: true
-});
-var tx = new PassThrough({
+})
+const tx = new PassThrough({
objectMode: true
-});
-var dest = new PassThrough({
+})
+const dest = new PassThrough({
objectMode: true
-});
-var expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
-var results = [];
-dest.on('data', common.mustCall(function (x) {
- results.push(x);
-}, expect.length));
-src.pipe(tx).pipe(dest);
-var i = -1;
-var int = setInterval(common.mustCall(function () {
- if (results.length === expect.length) {
- src.end();
- clearInterval(int);
- assert.deepStrictEqual(results, expect);
+})
+const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+const results = []
+dest.on(
+ 'data',
+ common.mustCall(function (x) {
+ results.push(x)
+ }, expect.length)
+)
+src.pipe(tx).pipe(dest)
+let i = -1
+const int = setInterval(
+ common.mustCall(function () {
+ if (results.length === expect.length) {
+ src.end()
+ clearInterval(int)
+ assert.deepStrictEqual(results, expect)
+ } else {
+ src.write(i++)
+ }
+ }, expect.length + 1),
+ 1
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
} else {
- src.write(i++);
+ tap.fail(`test failed - exited code ${code}`)
}
-}, expect.length + 1), 1);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js
index 5de64157aa..8eac3eda60 100644
--- a/test/parallel/test-stream-transform-split-highwatermark.js
+++ b/test/parallel/test-stream-transform-split-highwatermark.js
@@ -1,136 +1,133 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Transform = _require.Transform,
- Readable = _require.Readable,
- Writable = _require.Writable;
+const { Transform, Readable, Writable } = require('../../lib/ours/index')
-var DEFAULT = 16 * 1024;
+const DEFAULT = 16 * 1024
function testTransform(expectedReadableHwm, expectedWritableHwm, options) {
- var t = new Transform(options);
- assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm);
- assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm);
-} // test overriding defaultHwm
-
+ const t = new Transform(options)
+ assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm)
+ assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm)
+} // Test overriding defaultHwm
testTransform(666, DEFAULT, {
readableHighWaterMark: 666
-});
+})
testTransform(DEFAULT, 777, {
writableHighWaterMark: 777
-});
+})
testTransform(666, 777, {
readableHighWaterMark: 666,
writableHighWaterMark: 777
-}); // test 0 overriding defaultHwm
+}) // test 0 overriding defaultHwm
testTransform(0, DEFAULT, {
readableHighWaterMark: 0
-});
+})
testTransform(DEFAULT, 0, {
writableHighWaterMark: 0
-}); // test highWaterMark overriding
+}) // Test highWaterMark overriding
testTransform(555, 555, {
highWaterMark: 555,
readableHighWaterMark: 666
-});
+})
testTransform(555, 555, {
highWaterMark: 555,
writableHighWaterMark: 777
-});
+})
testTransform(555, 555, {
highWaterMark: 555,
readableHighWaterMark: 666,
writableHighWaterMark: 777
-}); // test highWaterMark = 0 overriding
+}) // Test highWaterMark = 0 overriding
testTransform(0, 0, {
highWaterMark: 0,
readableHighWaterMark: 666
-});
+})
testTransform(0, 0, {
highWaterMark: 0,
writableHighWaterMark: 777
-});
+})
testTransform(0, 0, {
highWaterMark: 0,
readableHighWaterMark: 666,
writableHighWaterMark: 777
-}); // test undefined, null
+}) // Test undefined, null
-[undefined, null].forEach(function (v) {
+;[undefined, null].forEach((v) => {
testTransform(DEFAULT, DEFAULT, {
readableHighWaterMark: v
- });
+ })
testTransform(DEFAULT, DEFAULT, {
writableHighWaterMark: v
- });
+ })
testTransform(666, DEFAULT, {
highWaterMark: v,
readableHighWaterMark: 666
- });
+ })
testTransform(DEFAULT, 777, {
highWaterMark: v,
writableHighWaterMark: 777
- });
-}); // test NaN
+ })
+}) // test NaN
{
- common.expectsError(function () {
- new Transform({
- readableHighWaterMark: NaN
- });
- }, {
- type: TypeError,
- code: 'ERR_INVALID_OPT_VALUE',
- message: 'The value "NaN" is invalid for option "readableHighWaterMark"'
- });
- common.expectsError(function () {
- new Transform({
- writableHighWaterMark: NaN
- });
- }, {
- type: TypeError,
- code: 'ERR_INVALID_OPT_VALUE',
- message: 'The value "NaN" is invalid for option "writableHighWaterMark"'
- });
-} // test non Duplex streams ignore the options
+ assert.throws(
+ () => {
+ new Transform({
+ readableHighWaterMark: NaN
+ })
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_VALUE',
+ message: "The property 'options.readableHighWaterMark' is invalid. " + 'Received NaN'
+ }
+ )
+ assert.throws(
+ () => {
+ new Transform({
+ writableHighWaterMark: NaN
+ })
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_VALUE',
+ message: "The property 'options.writableHighWaterMark' is invalid. " + 'Received NaN'
+ }
+ )
+} // Test non Duplex streams ignore the options
{
- var r = new Readable({
+ const r = new Readable({
readableHighWaterMark: 666
- });
- assert.strictEqual(r._readableState.highWaterMark, DEFAULT);
- var w = new Writable({
+ })
+ assert.strictEqual(r._readableState.highWaterMark, DEFAULT)
+ const w = new Writable({
writableHighWaterMark: 777
- });
- assert.strictEqual(w._writableState.highWaterMark, DEFAULT);
+ })
+ assert.strictEqual(w._writableState.highWaterMark, DEFAULT)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js
index e2349c1862..db93fe9342 100644
--- a/test/parallel/test-stream-transform-split-objectmode.js
+++ b/test/parallel/test-stream-transform-split-objectmode.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,80 +18,76 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Transform = require('../../').Transform;
+const Transform = require('../../lib/ours/index').Transform
-var parser = new Transform({
+const parser = new Transform({
readableObjectMode: true
-});
-assert(parser._readableState.objectMode);
-assert(!parser._writableState.objectMode);
-assert.strictEqual(parser.readableHighWaterMark, 16);
-assert.strictEqual(parser.writableHighWaterMark, 16 * 1024);
-assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark);
-assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark);
+})
+assert(parser._readableState.objectMode)
+assert(!parser._writableState.objectMode)
+assert.strictEqual(parser.readableHighWaterMark, 16)
+assert.strictEqual(parser.writableHighWaterMark, 16 * 1024)
+assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark)
+assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark)
parser._transform = function (chunk, enc, callback) {
callback(null, {
val: chunk[0]
- });
-};
+ })
+}
-var parsed;
+let parsed
parser.on('data', function (obj) {
- parsed = obj;
-});
-parser.end(bufferShim.from([42]));
+ parsed = obj
+})
+parser.end(Buffer.from([42]))
process.on('exit', function () {
- assert.strictEqual(parsed.val, 42);
-});
-var serializer = new Transform({
+ assert.strictEqual(parsed.val, 42)
+})
+const serializer = new Transform({
writableObjectMode: true
-});
-assert(!serializer._readableState.objectMode);
-assert(serializer._writableState.objectMode);
-assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024);
-assert.strictEqual(serializer.writableHighWaterMark, 16);
-assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark);
-assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark);
+})
+assert(!serializer._readableState.objectMode)
+assert(serializer._writableState.objectMode)
+assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024)
+assert.strictEqual(serializer.writableHighWaterMark, 16)
+assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark)
+assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark)
serializer._transform = function (obj, _, callback) {
- callback(null, bufferShim.from([obj.val]));
-};
+ callback(null, Buffer.from([obj.val]))
+}
-var serialized;
+let serialized
serializer.on('data', function (chunk) {
- serialized = chunk;
-});
+ serialized = chunk
+})
serializer.write({
val: 42
-});
+})
process.on('exit', function () {
- assert.strictEqual(serialized[0], 42);
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(serialized[0], 42)
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-uint8array.js b/test/parallel/test-stream-uint8array.js
index b5b767270c..a57e13da16 100644
--- a/test/parallel/test-stream-uint8array.js
+++ b/test/parallel/test-stream-uint8array.js
@@ -1,131 +1,105 @@
-"use strict";
+'use strict'
-function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
+const tap = require('tap')
-function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
+const silentConsole = {
+ log() {},
-function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
-
-function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable;
+const { Readable, Writable } = require('../../lib/ours/index')
-var ABC = new Uint8Array([0x41, 0x42, 0x43]);
-var DEF = new Uint8Array([0x44, 0x45, 0x46]);
-var GHI = new Uint8Array([0x47, 0x48, 0x49]);
+const ABC = new Uint8Array([0x41, 0x42, 0x43])
+const DEF = new Uint8Array([0x44, 0x45, 0x46])
+const GHI = new Uint8Array([0x47, 0x48, 0x49])
{
// Simple Writable test.
- var n = 0;
- var writable = new Writable({
- write: common.mustCall(function (chunk, encoding, cb) {
- assert(chunk instanceof Buffer);
+ let n = 0
+ const writable = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert(chunk instanceof Buffer)
if (n++ === 0) {
- assert.strictEqual(String(chunk), 'ABC');
+ assert.strictEqual(String(chunk), 'ABC')
} else {
- assert.strictEqual(String(chunk), 'DEF');
+ assert.strictEqual(String(chunk), 'DEF')
}
- cb();
+ cb()
}, 2)
- });
- writable.write(ABC);
- writable.end(DEF);
+ })
+ writable.write(ABC)
+ writable.end(DEF)
}
{
// Writable test, pass in Uint8Array in object mode.
- var _writable = new Writable({
+ const writable = new Writable({
objectMode: true,
- write: common.mustCall(function (chunk, encoding, cb) {
- assert(!(chunk instanceof Buffer));
- assert(chunk instanceof Uint8Array);
- assert.strictEqual(chunk, ABC);
- assert.strictEqual(encoding, 'utf8');
- cb();
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert(!(chunk instanceof Buffer))
+ assert(chunk instanceof Uint8Array)
+ assert.strictEqual(chunk, ABC)
+ assert.strictEqual(encoding, 'utf8')
+ cb()
})
- });
-
- _writable.end(ABC);
+ })
+ writable.end(ABC)
}
{
// Writable test, multiple writes carried out via writev.
- var callback;
-
- var _writable2 = new Writable({
- write: common.mustCall(function (chunk, encoding, cb) {
- assert(chunk instanceof Buffer);
- assert.strictEqual(encoding, 'buffer');
- assert.strictEqual(String(chunk), 'ABC');
- callback = cb;
+ let callback
+ const writable = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert(chunk instanceof Buffer)
+ assert.strictEqual(encoding, 'buffer')
+ assert.strictEqual(String(chunk), 'ABC')
+ callback = cb
}),
- writev: common.mustCall(function (chunks, cb) {
- assert.strictEqual(chunks.length, 2);
- assert.strictEqual(chunks[0].encoding, 'buffer');
- assert.strictEqual(chunks[1].encoding, 'buffer');
- assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI');
+ writev: common.mustCall((chunks, cb) => {
+ assert.strictEqual(chunks.length, 2)
+ assert.strictEqual(chunks[0].encoding, 'buffer')
+ assert.strictEqual(chunks[1].encoding, 'buffer')
+ assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI')
})
- });
-
- _writable2.write(ABC);
-
- _writable2.write(DEF);
-
- _writable2.end(GHI);
-
- callback();
+ })
+ writable.write(ABC)
+ writable.write(DEF)
+ writable.end(GHI)
+ callback()
}
{
// Simple Readable test.
- var readable = new Readable({
- read: function read() {}
- });
- readable.push(DEF);
- readable.unshift(ABC);
- var buf = readable.read();
- assert(buf instanceof Buffer);
- assert.deepStrictEqual(_toConsumableArray(buf), [].concat(_toConsumableArray(ABC), _toConsumableArray(DEF)));
+ const readable = new Readable({
+ read() {}
+ })
+ readable.push(DEF)
+ readable.unshift(ABC)
+ const buf = readable.read()
+ assert(buf instanceof Buffer)
+ assert.deepStrictEqual([...buf], [...ABC, ...DEF])
}
{
// Readable test, setEncoding.
- var _readable = new Readable({
- read: function read() {}
- });
-
- _readable.setEncoding('utf8');
-
- _readable.push(DEF);
-
- _readable.unshift(ABC);
-
- var out = _readable.read();
-
- assert.strictEqual(out, 'ABCDEF');
+ const readable = new Readable({
+ read() {}
+ })
+ readable.setEncoding('utf8')
+ readable.push(DEF)
+ readable.unshift(ABC)
+ const out = readable.read()
+ assert.strictEqual(out, 'ABCDEF')
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js
index 797e03182d..0ce62499ed 100644
--- a/test/parallel/test-stream-unpipe-event.js
+++ b/test/parallel/test-stream-unpipe-event.js
@@ -1,209 +1,109 @@
-"use strict";
+'use strict'
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+const tap = require('tap')
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+const silentConsole = {
+ log() {},
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-if (process.version.indexOf('v0.8') === 0) {
- process.exit(0);
+ error() {}
}
-/**/
-
-
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var _require = require('../../'),
- Writable = _require.Writable,
- Readable = _require.Readable;
-
-var NullWriteable =
-/*#__PURE__*/
-function (_Writable) {
- _inherits(NullWriteable, _Writable);
-
- function NullWriteable() {
- _classCallCheck(this, NullWriteable);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(NullWriteable).apply(this, arguments));
- }
-
- _createClass(NullWriteable, [{
- key: "_write",
- value: function _write(chunk, encoding, callback) {
- return callback();
- }
- }]);
+const common = require('../common')
- return NullWriteable;
-}(Writable);
+const assert = require('assert')
-var QuickEndReadable =
-/*#__PURE__*/
-function (_Readable) {
- _inherits(QuickEndReadable, _Readable);
+const { Writable, Readable } = require('../../lib/ours/index')
- function QuickEndReadable() {
- _classCallCheck(this, QuickEndReadable);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(QuickEndReadable).apply(this, arguments));
+class NullWriteable extends Writable {
+ _write(chunk, encoding, callback) {
+ return callback()
}
+}
- _createClass(QuickEndReadable, [{
- key: "_read",
- value: function _read() {
- this.push(null);
- }
- }]);
-
- return QuickEndReadable;
-}(Readable);
-
-var NeverEndReadable =
-/*#__PURE__*/
-function (_Readable2) {
- _inherits(NeverEndReadable, _Readable2);
-
- function NeverEndReadable() {
- _classCallCheck(this, NeverEndReadable);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(NeverEndReadable).apply(this, arguments));
+class QuickEndReadable extends Readable {
+ _read() {
+ this.push(null)
}
+}
- _createClass(NeverEndReadable, [{
- key: "_read",
- value: function _read() {}
- }]);
-
- return NeverEndReadable;
-}(Readable);
+class NeverEndReadable extends Readable {
+ _read() {}
+}
{
- var dest = new NullWriteable();
- var src = new QuickEndReadable();
- dest.on('pipe', common.mustCall());
- dest.on('unpipe', common.mustCall());
- src.pipe(dest);
- setImmediate(function () {
- assert.strictEqual(src._readableState.pipesCount, 0);
- });
+ const dest = new NullWriteable()
+ const src = new QuickEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
}
{
- var _dest = new NullWriteable();
-
- var _src = new NeverEndReadable();
-
- _dest.on('pipe', common.mustCall());
-
- _dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted'));
-
- _src.pipe(_dest);
-
- setImmediate(function () {
- assert.strictEqual(_src._readableState.pipesCount, 1);
- });
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted'))
+ src.pipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 1)
+ })
}
{
- var _dest2 = new NullWriteable();
-
- var _src2 = new NeverEndReadable();
-
- _dest2.on('pipe', common.mustCall());
-
- _dest2.on('unpipe', common.mustCall());
-
- _src2.pipe(_dest2);
-
- _src2.unpipe(_dest2);
-
- setImmediate(function () {
- assert.strictEqual(_src2._readableState.pipesCount, 0);
- });
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest)
+ src.unpipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
}
{
- var _dest3 = new NullWriteable();
-
- var _src3 = new QuickEndReadable();
-
- _dest3.on('pipe', common.mustCall());
-
- _dest3.on('unpipe', common.mustCall());
-
- _src3.pipe(_dest3, {
+ const dest = new NullWriteable()
+ const src = new QuickEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest, {
end: false
- });
-
- setImmediate(function () {
- assert.strictEqual(_src3._readableState.pipesCount, 0);
- });
+ })
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
}
{
- var _dest4 = new NullWriteable();
-
- var _src4 = new NeverEndReadable();
-
- _dest4.on('pipe', common.mustCall());
-
- _dest4.on('unpipe', common.mustNotCall('unpipe should not have been emitted'));
-
- _src4.pipe(_dest4, {
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted'))
+ src.pipe(dest, {
end: false
- });
-
- setImmediate(function () {
- assert.strictEqual(_src4._readableState.pipesCount, 1);
- });
+ })
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 1)
+ })
}
{
- var _dest5 = new NullWriteable();
-
- var _src5 = new NeverEndReadable();
-
- _dest5.on('pipe', common.mustCall());
-
- _dest5.on('unpipe', common.mustCall());
-
- _src5.pipe(_dest5, {
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest, {
end: false
- });
-
- _src5.unpipe(_dest5);
-
- setImmediate(function () {
- assert.strictEqual(_src5._readableState.pipesCount, 0);
- });
+ })
+ src.unpipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
+/* replacement start */
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js
index 2157dff99f..89dccd6c9f 100644
--- a/test/parallel/test-stream-unshift-empty-chunk.js
+++ b/test/parallel/test-stream-unshift-empty-chunk.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,67 +18,80 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/'); // This test verifies that stream.unshift(bufferShim.alloc(0)) or
+const assert = require('assert') // This test verifies that stream.unshift(Buffer.alloc(0)) or
// stream.unshift('') does not set state.reading=false.
+const Readable = require('../../lib/ours/index').Readable
-var Readable = require('../../').Readable;
-
-var r = new Readable();
-var nChunks = 10;
-var chunk = bufferShim.alloc(10, 'x');
+const r = new Readable()
+let nChunks = 10
+const chunk = Buffer.alloc(10, 'x')
r._read = function (n) {
- setImmediate(function () {
- r.push(--nChunks === 0 ? null : chunk);
- });
-};
-
-var readAll = false;
-var seen = [];
-r.on('readable', function () {
- var chunk;
-
- while (chunk = r.read()) {
- seen.push(chunk.toString()); // simulate only reading a certain amount of the data,
+ setImmediate(() => {
+ r.push(--nChunks === 0 ? null : chunk)
+ })
+}
+
+let readAll = false
+const seen = []
+r.on('readable', () => {
+ let chunk
+
+ while ((chunk = r.read()) !== null) {
+ seen.push(chunk.toString()) // Simulate only reading a certain amount of the data,
// and then putting the rest of the chunk back into the
// stream, like a parser might do. We just fill it with
// 'y' so that it's easy to see which bits were touched,
// and which were not.
- var putBack = bufferShim.alloc(readAll ? 0 : 5, 'y');
- readAll = !readAll;
- r.unshift(putBack);
+ const putBack = Buffer.alloc(readAll ? 0 : 5, 'y')
+ readAll = !readAll
+ r.unshift(putBack)
}
-});
-var expect = ['xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy'];
-r.on('end', function () {
- assert.deepStrictEqual(seen, expect);
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+const expect = [
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy'
+]
+r.on('end', () => {
+ assert.deepStrictEqual(seen, expect)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js
index fc0c64df48..fd09812727 100644
--- a/test/parallel/test-stream-unshift-read-race.js
+++ b/test/parallel/test-stream-unshift-read-race.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,155 +18,148 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/'); // This test verifies that:
+const assert = require('assert') // This test verifies that:
// 1. unshift() does not cause colliding _read() calls.
// 2. unshift() after the 'end' event is an error, but after the EOF
// signalling null, it is ok, and just creates a new readable chunk.
// 3. push() after the EOF signaling null is an error.
// 4. _read() is not called after pushing the EOF null chunk.
+const stream = require('../../lib/ours/index')
-var stream = require('../../');
+const hwm = 10
+const r = stream.Readable({
+ highWaterMark: hwm,
+ autoDestroy: false
+})
+const chunks = 10
+const data = Buffer.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2))
-var hwm = 10;
-var r = stream.Readable({
- highWaterMark: hwm
-});
-var chunks = 10;
-var data = bufferShim.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2));
-
-for (var i = 0; i < data.length; i++) {
- var c = 'asdf'.charCodeAt(i % 4);
- data[i] = c;
+for (let i = 0; i < data.length; i++) {
+ const c = 'asdf'.charCodeAt(i % 4)
+ data[i] = c
}
-var pos = 0;
-var pushedNull = false;
+let pos = 0
+let pushedNull = false
r._read = function (n) {
- assert(!pushedNull, '_read after null push'); // every third chunk is fast
+ assert(!pushedNull, '_read after null push') // Every third chunk is fast
- push(!(chunks % 3));
+ push(!(chunks % 3))
function push(fast) {
- assert(!pushedNull, 'push() after null push');
- var c = pos >= data.length ? null : data.slice(pos, pos + n);
- pushedNull = c === null;
+ assert(!pushedNull, 'push() after null push')
+ const c = pos >= data.length ? null : data.slice(pos, pos + n)
+ pushedNull = c === null
if (fast) {
- pos += n;
- r.push(c);
- if (c === null) pushError();
+ pos += n
+ r.push(c)
+ if (c === null) pushError()
} else {
setTimeout(function () {
- pos += n;
- r.push(c);
- if (c === null) pushError();
- }, 1);
+ pos += n
+ r.push(c)
+ if (c === null) pushError()
+ }, 1)
}
}
-};
+}
function pushError() {
- common.expectsError(function () {
- r.push(bufferShim.allocUnsafe(1));
- }, {
- code: 'ERR_STREAM_PUSH_AFTER_EOF',
- type: Error,
- message: 'stream.push() after EOF'
- });
+ r.unshift(Buffer.allocUnsafe(1))
+ w.end()
+ assert.throws(
+ () => {
+ r.push(Buffer.allocUnsafe(1))
+ },
+ {
+ code: 'ERR_STREAM_PUSH_AFTER_EOF',
+ name: 'Error',
+ message: 'stream.push() after EOF'
+ }
+ )
}
-var w = stream.Writable();
-var written = [];
+const w = stream.Writable()
+const written = []
w._write = function (chunk, encoding, cb) {
- written.push(chunk.toString());
- cb();
-};
-
-r.on('end', common.mustCall(function () {
- common.expectsError(function () {
- r.unshift(bufferShim.allocUnsafe(1));
- }, {
- code: 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT',
- type: Error,
- message: 'stream.unshift() after end event'
- });
- w.end();
-}));
+ written.push(chunk.toString())
+ cb()
+}
+
+r.on('end', common.mustNotCall())
r.on('readable', function () {
- var chunk;
+ let chunk
while (null !== (chunk = r.read(10))) {
- w.write(chunk);
- if (chunk.length > 4) r.unshift(bufferShim.from('1234'));
+ w.write(chunk)
+ if (chunk.length > 4) r.unshift(Buffer.from('1234'))
}
-});
-w.on('finish', common.mustCall(function () {
- // each chunk should start with 1234, and then be asfdasdfasdf...
- // The first got pulled out before the first unshift('1234'), so it's
- // lacking that piece.
- assert.strictEqual(written[0], 'asdfasdfas');
- var asdf = 'd';
- console.error("0: ".concat(written[0]));
-
- for (var _i = 1; _i < written.length; _i++) {
- console.error("".concat(_i.toString(32), ": ").concat(written[_i]));
- assert.strictEqual(written[_i].slice(0, 4), '1234');
-
- for (var j = 4; j < written[_i].length; j++) {
- var _c = written[_i].charAt(j);
-
- assert.strictEqual(_c, asdf);
-
- switch (asdf) {
- case 'a':
- asdf = 's';
- break;
-
- case 's':
- asdf = 'd';
- break;
-
- case 'd':
- asdf = 'f';
- break;
-
- case 'f':
- asdf = 'a';
- break;
+})
+w.on(
+ 'finish',
+ common.mustCall(function () {
+ // Each chunk should start with 1234, and then be asfdasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ assert.strictEqual(written[0], 'asdfasdfas')
+ let asdf = 'd'
+ silentConsole.error(`0: ${written[0]}`)
+
+ for (let i = 1; i < written.length; i++) {
+ silentConsole.error(`${i.toString(32)}: ${written[i]}`)
+ assert.strictEqual(written[i].slice(0, 4), '1234')
+
+ for (let j = 4; j < written[i].length; j++) {
+ const c = written[i].charAt(j)
+ assert.strictEqual(c, asdf)
+
+ switch (asdf) {
+ case 'a':
+ asdf = 's'
+ break
+
+ case 's':
+ asdf = 'd'
+ break
+
+ case 'd':
+ asdf = 'f'
+ break
+
+ case 'f':
+ asdf = 'a'
+ break
+ }
}
}
- }
-}));
+ })
+)
process.on('exit', function () {
- assert.strictEqual(written.length, 18);
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(written.length, 18)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-aborted.js b/test/parallel/test-stream-writable-aborted.js
new file mode 100644
index 0000000000..bc82a85c8d
--- /dev/null
+++ b/test/parallel/test-stream-writable-aborted.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const { Writable } = require('../../lib/ours/index')
+
+{
+ const writable = new Writable({
+ write() {}
+ })
+ assert.strictEqual(writable.writableAborted, false)
+ writable.destroy()
+ assert.strictEqual(writable.writableAborted, true)
+}
+{
+ const writable = new Writable({
+ write() {}
+ })
+ assert.strictEqual(writable.writableAborted, false)
+ writable.end()
+ writable.destroy()
+ assert.strictEqual(writable.writableAborted, true)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-callback-twice.js b/test/parallel/test-stream-writable-callback-twice.js
new file mode 100644
index 0000000000..e636ca16f4
--- /dev/null
+++ b/test/parallel/test-stream-writable-callback-twice.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable } = require('../../lib/ours/index')
+
+const stream = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ cb()
+ }
+})
+stream.on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'Callback called multiple times',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+)
+stream.write('foo')
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js
index 441dc637e5..4f4b1b697e 100644
--- a/test/parallel/test-stream-writable-change-default-encoding.js
+++ b/test/parallel/test-stream-writable-change-default-encoding.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,103 +18,96 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
+const tap = require('tap')
-var stream = require('../../');
+const silentConsole = {
+ log() {},
-var MyWritable =
-/*#__PURE__*/
-function (_stream$Writable) {
- _inherits(MyWritable, _stream$Writable);
+ error() {}
+}
+require('../common')
- function MyWritable(fn, options) {
- var _this;
+const assert = require('assert')
- _classCallCheck(this, MyWritable);
+const stream = require('../../lib/ours/index')
- _this = _possibleConstructorReturn(this, _getPrototypeOf(MyWritable).call(this, options));
- _this.fn = fn;
- return _this;
+class MyWritable extends stream.Writable {
+ constructor(fn, options) {
+ super(options)
+ this.fn = fn
}
- _createClass(MyWritable, [{
- key: "_write",
- value: function _write(chunk, encoding, callback) {
- this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
- callback();
+ _write(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+ }
+}
+
+;(function defaultCondingIsUtf8() {
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert.strictEqual(enc, 'utf8')
+ },
+ {
+ decodeStrings: false
}
- }]);
-
- return MyWritable;
-}(stream.Writable);
-
-(function defaultCondingIsUtf8() {
- var m = new MyWritable(function (isBuffer, type, enc) {
- assert.strictEqual(enc, 'utf8');
- }, {
- decodeStrings: false
- });
- m.write('foo');
- m.end();
-})();
-
-(function changeDefaultEncodingToAscii() {
- var m = new MyWritable(function (isBuffer, type, enc) {
- assert.strictEqual(enc, 'ascii');
- }, {
- decodeStrings: false
- });
- m.setDefaultEncoding('ascii');
- m.write('bar');
- m.end();
-})();
-
-common.expectsError(function changeDefaultEncodingToInvalidValue() {
- var m = new MyWritable(function (isBuffer, type, enc) {}, {
- decodeStrings: false
- });
- m.setDefaultEncoding({});
- m.write('bar');
- m.end();
-}, {
- type: TypeError,
- code: 'ERR_UNKNOWN_ENCODING',
- message: 'Unknown encoding: [object Object]'
-});
-
-(function checkVairableCaseEncoding() {
- var m = new MyWritable(function (isBuffer, type, enc) {
- assert.strictEqual(enc, 'ascii');
- }, {
- decodeStrings: false
- });
- m.setDefaultEncoding('AsCii');
- m.write('bar');
- m.end();
-})();
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ )
+ m.write('foo')
+ m.end()
+})()
+
+;(function changeDefaultEncodingToAscii() {
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert.strictEqual(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('ascii')
+ m.write('bar')
+ m.end()
+})() // Change default encoding to invalid value.
+
+assert.throws(
+ () => {
+ const m = new MyWritable((isBuffer, type, enc) => {}, {
+ decodeStrings: false
+ })
+ m.setDefaultEncoding({})
+ m.write('bar')
+ m.end()
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_UNKNOWN_ENCODING',
+ message: 'Unknown encoding: {}'
+ }
+)
+
+;(function checkVariableCaseEncoding() {
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert.strictEqual(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('AsCii')
+ m.write('bar')
+ m.end()
+})()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-clear-buffer.js b/test/parallel/test-stream-writable-clear-buffer.js
new file mode 100644
index 0000000000..f1a390c6e3
--- /dev/null
+++ b/test/parallel/test-stream-writable-clear-buffer.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+} // This test ensures that the _writeableState.bufferedRequestCount and
+// the actual buffered request count are the same.
+
+const common = require('../common')
+
+const Stream = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+class StreamWritable extends Stream.Writable {
+ constructor() {
+ super({
+ objectMode: true
+ })
+ } // Refs: https://github.com/nodejs/node/issues/6758
+ // We need a timer like on the original issue thread.
+ // Otherwise the code will never reach our test case.
+
+ _write(chunk, encoding, cb) {
+ setImmediate(cb)
+ }
+}
+
+const testStream = new StreamWritable()
+testStream.cork()
+
+for (let i = 1; i <= 5; i++) {
+ testStream.write(
+ i,
+ common.mustCall(() => {
+ assert.strictEqual(testStream._writableState.bufferedRequestCount, testStream._writableState.getBuffer().length)
+ })
+ )
+}
+
+testStream.end()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js
index 8d4938822b..1821dff1a4 100644
--- a/test/parallel/test-stream-writable-constructor-set-methods.js
+++ b/test/parallel/test-stream-writable-constructor-set-methods.js
@@ -1,60 +1,58 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('assert/'),
- strictEqual = _require.strictEqual;
+const assert = require('assert')
-var _require2 = require('../../'),
- Writable = _require2.Writable;
+const { Writable } = require('../../lib/ours/index')
-var w = new Writable();
-w.on('error', common.expectsError({
- type: Error,
- code: 'ERR_METHOD_NOT_IMPLEMENTED',
- message: 'The _write() method is not implemented'
-}));
-w.end(bufferShim.from('blerg'));
+const bufferBlerg = Buffer.from('blerg')
+const w = new Writable()
+assert.throws(
+ () => {
+ w.end(bufferBlerg)
+ },
+ {
+ name: 'Error',
+ code: 'ERR_METHOD_NOT_IMPLEMENTED',
+ message: 'The _write() method is not implemented'
+ }
+)
-var _write = common.mustCall(function (chunk, _, next) {
- next();
-});
+const _write = common.mustCall((chunk, _, next) => {
+ next()
+})
-var _writev = common.mustCall(function (chunks, next) {
- strictEqual(chunks.length, 2);
- next();
-});
+const _writev = common.mustCall((chunks, next) => {
+ assert.strictEqual(chunks.length, 2)
+ next()
+})
-var w2 = new Writable({
+const w2 = new Writable({
write: _write,
writev: _writev
-});
-strictEqual(w2._write, _write);
-strictEqual(w2._writev, _writev);
-w2.write(bufferShim.from('blerg'));
-w2.cork();
-w2.write(bufferShim.from('blerg'));
-w2.write(bufferShim.from('blerg'));
-w2.end();
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+assert.strictEqual(w2._write, _write)
+assert.strictEqual(w2._writev, _writev)
+w2.write(bufferBlerg)
+w2.cork()
+w2.write(bufferBlerg)
+w2.write(bufferBlerg)
+w2.end()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js
index 1bf9ed6ea3..5528943b2b 100644
--- a/test/parallel/test-stream-writable-decoded-encoding.js
+++ b/test/parallel/test-stream-writable-decoded-encoding.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,82 +18,68 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-require('../common');
+const tap = require('tap')
-var assert = require('assert/');
+const silentConsole = {
+ log() {},
-var stream = require('../../');
-
-var MyWritable =
-/*#__PURE__*/
-function (_stream$Writable) {
- _inherits(MyWritable, _stream$Writable);
+ error() {}
+}
+require('../common')
- function MyWritable(fn, options) {
- var _this;
+const assert = require('assert')
- _classCallCheck(this, MyWritable);
+const stream = require('../../lib/ours/index')
- _this = _possibleConstructorReturn(this, _getPrototypeOf(MyWritable).call(this, options));
- _this.fn = fn;
- return _this;
+class MyWritable extends stream.Writable {
+ constructor(fn, options) {
+ super(options)
+ this.fn = fn
}
- _createClass(MyWritable, [{
- key: "_write",
- value: function _write(chunk, encoding, callback) {
- this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
- callback();
- }
- }]);
-
- return MyWritable;
-}(stream.Writable);
+ _write(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+ }
+}
{
- var m = new MyWritable(function (isBuffer, type, enc) {
- assert(isBuffer);
- assert.strictEqual(type, 'object');
- assert.strictEqual(enc, 'buffer');
- }, {
- decodeStrings: true
- });
- m.write('some-text', 'utf8');
- m.end();
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert(isBuffer)
+ assert.strictEqual(type, 'object')
+ assert.strictEqual(enc, 'buffer')
+ },
+ {
+ decodeStrings: true
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
}
{
- var _m = new MyWritable(function (isBuffer, type, enc) {
- assert(!isBuffer);
- assert.strictEqual(type, 'string');
- assert.strictEqual(enc, 'utf8');
- }, {
- decodeStrings: false
- });
-
- _m.write('some-text', 'utf8');
-
- _m.end();
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert(!isBuffer)
+ assert.strictEqual(type, 'string')
+ assert.strictEqual(enc, 'utf8')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
}
-;
+/* replacement start */
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js
index dfc88f8166..8c6ea069d1 100644
--- a/test/parallel/test-stream-writable-destroy.js
+++ b/test/parallel/test-stream-writable-destroy.js
@@ -1,286 +1,581 @@
-"use strict";
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
-var common = require('../common');
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+
+const tap = require('tap')
-var _require = require('../../'),
- Writable = _require.Writable;
+const silentConsole = {
+ log() {},
-var assert = require('assert/');
+ error() {}
+}
+const common = require('../common')
+
+const { Writable, addAbortSignal } = require('../../lib/ours/index')
+
+const assert = require('assert')
{
- var write = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
- write.on('finish', common.mustNotCall());
- write.on('close', common.mustCall());
- write.destroy();
- assert.strictEqual(write.destroyed, true);
+ })
+ write.on('finish', common.mustNotCall())
+ write.on('close', common.mustCall())
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
}
{
- var _write = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ this.destroy(new Error('asd'))
+ cb()
}
- });
-
- var expected = new Error('kaboom');
-
- _write.on('finish', common.mustNotCall());
-
- _write.on('close', common.mustCall());
-
- _write.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, expected);
- }));
-
- _write.destroy(expected);
-
- assert.strictEqual(_write.destroyed, true);
+ })
+ write.on('error', common.mustCall())
+ write.on('finish', common.mustNotCall())
+ write.end('asd')
+ assert.strictEqual(write.destroyed, true)
}
{
- var _write2 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
-
- _write2._destroy = function (err, cb) {
- assert.strictEqual(err, _expected);
- cb(err);
- };
-
- var _expected = new Error('kaboom');
-
- _write2.on('finish', common.mustNotCall('no finish event'));
-
- _write2.on('close', common.mustCall());
-
- _write2.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected);
- }));
+ })
+ const expected = new Error('kaboom')
+ write.on('finish', common.mustNotCall())
+ write.on('close', common.mustCall())
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ write.destroy(expected)
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
- _write2.destroy(_expected);
+ write._destroy = function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ }
- assert.strictEqual(_write2.destroyed, true);
+ const expected = new Error('kaboom')
+ write.on('finish', common.mustNotCall('no finish event'))
+ write.on('close', common.mustCall())
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ write.destroy(expected)
+ assert.strictEqual(write.destroyed, true)
}
{
- var _write3 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
},
+
destroy: common.mustCall(function (err, cb) {
- assert.strictEqual(err, _expected2);
- cb();
+ assert.strictEqual(err, expected)
+ cb()
})
- });
-
- var _expected2 = new Error('kaboom');
-
- _write3.on('finish', common.mustNotCall('no finish event'));
-
- _write3.on('close', common.mustCall()); // error is swallowed by the custom _destroy
-
-
- _write3.on('error', common.mustNotCall('no error event'));
-
- _write3.destroy(_expected2);
-
- assert.strictEqual(_write3.destroyed, true);
+ })
+ const expected = new Error('kaboom')
+ write.on('finish', common.mustNotCall('no finish event'))
+ write.on('close', common.mustCall()) // Error is swallowed by the custom _destroy
+
+ write.on('error', common.mustNotCall('no error event'))
+ write.destroy(expected)
+ assert.strictEqual(write.destroyed, true)
}
{
- var _write4 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
-
- _write4._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb();
- });
-
- _write4.destroy();
-
- assert.strictEqual(_write4.destroyed, true);
+ })
+ write._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ })
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
}
{
- var _write5 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
-
- _write5._destroy = common.mustCall(function (err, cb) {
- var _this = this;
-
- assert.strictEqual(err, null);
- process.nextTick(function () {
- _this.end();
-
- cb();
- });
- });
- var fail = common.mustNotCall('no finish event');
-
- _write5.on('finish', fail);
-
- _write5.on('close', common.mustCall());
-
- _write5.destroy();
-
- _write5.removeListener('finish', fail);
-
- _write5.on('finish', common.mustCall());
-
- assert.strictEqual(_write5.destroyed, true);
+ })
+ write._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.end()
+ cb()
+ })
+ })
+ const fail = common.mustNotCall('no finish event')
+ write.on('finish', fail)
+ write.on('close', common.mustCall())
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
}
{
- var _write6 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
-
- var _expected3 = new Error('kaboom');
-
- _write6._destroy = common.mustCall(function (err, cb) {
- assert.strictEqual(err, null);
- cb(_expected3);
- });
-
- _write6.on('close', common.mustCall());
-
- _write6.on('finish', common.mustNotCall('no finish event'));
-
- _write6.on('error', common.mustCall(function (err) {
- assert.strictEqual(err, _expected3);
- }));
-
- _write6.destroy();
-
- assert.strictEqual(_write6.destroyed, true);
+ })
+ const expected = new Error('kaboom')
+ write._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ })
+ write.on('close', common.mustCall())
+ write.on('finish', common.mustNotCall('no finish event'))
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
}
{
// double error case
- var _write7 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
-
- _write7.on('close', common.mustCall());
-
- _write7.on('error', common.mustCall());
-
- _write7.destroy(new Error('kaboom 1'));
-
- _write7.destroy(new Error('kaboom 2'));
-
- assert.strictEqual(_write7._writableState.errorEmitted, true);
- assert.strictEqual(_write7.destroyed, true);
+ })
+ let ticked = false
+ write.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(ticked, true)
+ })
+ )
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.message, 'kaboom 1')
+ assert.strictEqual(write._writableState.errorEmitted, true)
+ })
+ )
+ const expected = new Error('kaboom 1')
+ write.destroy(expected)
+ write.destroy(new Error('kaboom 2'))
+ assert.strictEqual(write._writableState.errored, expected)
+ assert.strictEqual(write._writableState.errorEmitted, false)
+ assert.strictEqual(write.destroyed, true)
+ ticked = true
}
{
- var writable = new Writable({
+ const writable = new Writable({
destroy: common.mustCall(function (err, cb) {
- process.nextTick(cb, new Error('kaboom 1'));
+ process.nextTick(cb, new Error('kaboom 1'))
}),
- write: function write(chunk, enc, cb) {
- cb();
+
+ write(chunk, enc, cb) {
+ cb()
}
- });
- writable.on('close', common.mustCall());
- writable.on('error', common.expectsError({
- type: Error,
- message: 'kaboom 2'
- }));
- writable.destroy();
- assert.strictEqual(writable.destroyed, true);
- assert.strictEqual(writable._writableState.errorEmitted, false); // Test case where `writable.destroy()` is called again with an error before
+ })
+ let ticked = false
+ writable.on(
+ 'close',
+ common.mustCall(() => {
+ writable.on('error', common.mustNotCall())
+ writable.destroy(new Error('hello'))
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(writable._writableState.errorEmitted, true)
+ })
+ )
+ writable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.message, 'kaboom 1')
+ assert.strictEqual(writable._writableState.errorEmitted, true)
+ })
+ )
+ writable.destroy()
+ assert.strictEqual(writable.destroyed, true)
+ assert.strictEqual(writable._writableState.errored, null)
+ assert.strictEqual(writable._writableState.errorEmitted, false) // Test case where `writable.destroy()` is called again with an error before
// the `_destroy()` callback is called.
- writable.destroy(new Error('kaboom 2'));
- assert.strictEqual(writable._writableState.errorEmitted, true);
+ writable.destroy(new Error('kaboom 2'))
+ assert.strictEqual(writable._writableState.errorEmitted, false)
+ assert.strictEqual(writable._writableState.errored, null)
+ ticked = true
}
{
- var _write8 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
+ })
+ write.destroyed = true
+ assert.strictEqual(write.destroyed, true) // The internal destroy() mechanism should not be triggered
- _write8.destroyed = true;
- assert.strictEqual(_write8.destroyed, true); // the internal destroy() mechanism should not be triggered
-
- _write8.on('close', common.mustNotCall());
-
- _write8.destroy();
+ write.on('close', common.mustNotCall())
+ write.destroy()
}
{
function MyWritable() {
- assert.strictEqual(this.destroyed, false);
- this.destroyed = false;
- Writable.call(this);
+ assert.strictEqual(this.destroyed, false)
+ this.destroyed = false
+ Writable.call(this)
}
- Object.setPrototypeOf(MyWritable.prototype, Writable.prototype);
- Object.setPrototypeOf(MyWritable, Writable);
- new MyWritable();
+ Object.setPrototypeOf(MyWritable.prototype, Writable.prototype)
+ Object.setPrototypeOf(MyWritable, Writable)
+ new MyWritable()
}
{
- // destroy and destroy callback
- var _write9 = new Writable({
- write: function write(chunk, enc, cb) {
- cb();
+ // Destroy and destroy callback
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
}
- });
-
- _write9.destroy();
-
- var _expected4 = new Error('kaboom');
-
- _write9.destroy(_expected4, common.mustCall(function (err) {
- assert.strictEqual(err, _expected4);
- }));
+ })
+ write.destroy()
+ const expected = new Error('kaboom')
+ write.destroy(
+ expected,
+ common.mustCall((err) => {
+ assert.strictEqual(err, undefined)
+ })
+ )
}
{
// Checks that `._undestroy()` restores the state so that `final` will be
// called again.
- var _write10 = new Writable({
+ const write = new Writable({
write: common.mustNotCall(),
- final: common.mustCall(function (cb) {
- return cb();
- }, 2)
- });
+ final: common.mustCall((cb) => cb(), 2),
+ autoDestroy: true
+ })
+ write.end()
+ write.once(
+ 'close',
+ common.mustCall(() => {
+ write._undestroy()
+
+ write.end()
+ })
+ )
+}
+{
+ const write = new Writable()
+ write.destroy()
+ write.on('error', common.mustNotCall())
+ write.write(
+ 'asd',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_STREAM_DESTROYED',
+ message: 'Cannot call write after a stream was destroyed'
+ })
+ )
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on('error', common.mustNotCall())
+ write.cork()
+ write.write('asd', common.mustCall())
+ write.uncork()
+ write.cork()
+ write.write(
+ 'asd',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_STREAM_DESTROYED',
+ message: 'Cannot call write after a stream was destroyed'
+ })
+ )
+ write.destroy()
+ write.write(
+ 'asd',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_STREAM_DESTROYED',
+ message: 'Cannot call write after a stream was destroyed'
+ })
+ )
+ write.uncork()
+}
+{
+ // Call end(cb) after error & destroy
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb(new Error('asd'))
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall(() => {
+ write.destroy()
+ let ticked = false
+ write.end(
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED')
+ })
+ )
+ ticked = true
+ })
+ )
+ write.write('asd')
+}
+{
+ // Call end(cb) after finish & destroy
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on(
+ 'finish',
+ common.mustCall(() => {
+ write.destroy()
+ let ticked = false
+ write.end(
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED')
+ })
+ )
+ ticked = true
+ })
+ )
+ write.end()
+}
+{
+ // Call end(cb) after error & destroy and don't trigger
+ // unhandled exception.
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ process.nextTick(cb)
+ }
+ })
- _write10.end();
+ const _err = new Error('asd')
- _write10.destroy();
+ write.once(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'asd')
+ })
+ )
+ write.end(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.destroy(_err)
+}
+{
+ // Call buffered write callback with error
+ const _err = new Error('asd')
- _write10._undestroy();
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ process.nextTick(cb, _err)
+ },
- _write10.end();
+ autoDestroy: false
+ })
+ write.cork()
+ write.write(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.write(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.uncork()
+}
+{
+ // Ensure callback order.
+ let state = 0
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ // `setImmediate()` is used on purpose to ensure the callback is called
+ // after `process.nextTick()` callbacks.
+ setImmediate(cb)
+ }
+ })
+ write.write(
+ 'asd',
+ common.mustCall(() => {
+ assert.strictEqual(state++, 0)
+ })
+ )
+ write.write(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED')
+ assert.strictEqual(state++, 1)
+ })
+ )
+ write.destroy()
}
-;
+{
+ const write = new Writable({
+ autoDestroy: false,
-(function () {
- var t = require('tap');
+ write(chunk, enc, cb) {
+ cb()
+ cb()
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall(() => {
+ assert(write._writableState.errored)
+ })
+ )
+ write.write('asd')
+}
+{
+ const ac = new AbortController()
+ const write = addAbortSignal(
+ ac.signal,
+ new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ )
+ write.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ assert.strictEqual(write.destroyed, true)
+ })
+ )
+ write.write('asd')
+ ac.abort()
+}
+{
+ const ac = new AbortController()
+ const write = new Writable({
+ signal: ac.signal,
- t.pass('sync run');
-})();
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ assert.strictEqual(write.destroyed, true)
+ })
+ )
+ write.write('asd')
+ ac.abort()
+}
+{
+ const signal = AbortSignal.abort()
+ const write = new Writable({
+ signal,
-var _list = process.listeners('uncaughtException');
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ assert.strictEqual(write.destroyed, true)
+ })
+ )
+}
+{
+ // Destroy twice
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.end(common.mustCall())
+ write.destroy()
+ write.destroy()
+}
+{
+ // https://github.com/nodejs/node/issues/39356
+ const s = new Writable({
+ final() {}
+ })
-process.removeAllListeners('uncaughtException');
+ const _err = new Error('oh no') // Remove `callback` and it works
-_list.pop();
+ s.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ s.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ s.destroy(_err)
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-end-cb-error.js b/test/parallel/test-stream-writable-end-cb-error.js
new file mode 100644
index 0000000000..c18650055a
--- /dev/null
+++ b/test/parallel/test-stream-writable-end-cb-error.js
@@ -0,0 +1,123 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+{
+ // Invoke end callback on failure.
+ const writable = new stream.Writable()
+
+ const _err = new Error('kaboom')
+
+ writable._write = (chunk, encoding, cb) => {
+ process.nextTick(cb, _err)
+ }
+
+ writable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ writable.write('asd')
+ writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+}
+{
+ // Don't invoke end callback twice
+ const writable = new stream.Writable()
+
+ writable._write = (chunk, encoding, cb) => {
+ process.nextTick(cb)
+ }
+
+ let called = false
+ writable.end(
+ 'asd',
+ common.mustCall((err) => {
+ called = true
+ assert.strictEqual(err, undefined)
+ })
+ )
+ writable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ })
+ )
+ writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(called, true)
+ writable.emit('error', new Error('kaboom'))
+ })
+ )
+}
+{
+ const w = new stream.Writable({
+ write(chunk, encoding, callback) {
+ setImmediate(callback)
+ },
+
+ finish(callback) {
+ setImmediate(callback)
+ }
+ })
+ w.end(
+ 'testing ended state',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ assert.strictEqual(w.destroyed, false)
+ assert.strictEqual(w.writableEnded, true)
+ w.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ assert.strictEqual(w.destroyed, false)
+ assert.strictEqual(w.writableEnded, true)
+ w.end(
+ 'end',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ assert.strictEqual(w.destroyed, true)
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ w.on('finish', common.mustNotCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-end-cb-uncaught.js b/test/parallel/test-stream-writable-end-cb-uncaught.js
new file mode 100644
index 0000000000..802be03982
--- /dev/null
+++ b/test/parallel/test-stream-writable-end-cb-uncaught.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ })
+)
+const writable = new stream.Writable()
+
+const _err = new Error('kaboom')
+
+writable._write = (chunk, encoding, cb) => {
+ cb()
+}
+
+writable._final = (cb) => {
+ cb(_err)
+}
+
+writable.write('asd')
+writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-end-multiple.js b/test/parallel/test-stream-writable-end-multiple.js
new file mode 100644
index 0000000000..868c79b491
--- /dev/null
+++ b/test/parallel/test-stream-writable-end-multiple.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+const writable = new stream.Writable()
+
+writable._write = (chunk, encoding, cb) => {
+ setTimeout(() => cb(), 10)
+}
+
+writable.end('testing ended state', common.mustCall())
+writable.end(common.mustCall())
+writable.on(
+ 'finish',
+ common.mustCall(() => {
+ let ticked = false
+ writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED')
+ })
+ )
+ ticked = true
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js
index ac8c6f44b4..f91d66b56e 100644
--- a/test/parallel/test-stream-writable-ended-state.js
+++ b/test/parallel/test-stream-writable-ended-state.js
@@ -1,42 +1,51 @@
-"use strict";
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var stream = require('../../');
-
-var writable = new stream.Writable();
-
-writable._write = function (chunk, encoding, cb) {
- assert.strictEqual(writable._writableState.ended, false);
- cb();
-};
-
-assert.strictEqual(writable._writableState.ended, false);
-writable.end('testing ended state', common.mustCall(function () {
- assert.strictEqual(writable._writableState.ended, true);
-}));
-assert.strictEqual(writable._writableState.ended, true);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+const writable = new stream.Writable()
+
+writable._write = (chunk, encoding, cb) => {
+ assert.strictEqual(writable._writableState.ended, false)
+ assert.strictEqual(writable._writableState.writable, undefined)
+ assert.strictEqual(writable.writableEnded, false)
+ cb()
+}
+
+assert.strictEqual(writable._writableState.ended, false)
+assert.strictEqual(writable._writableState.writable, undefined)
+assert.strictEqual(writable.writable, true)
+assert.strictEqual(writable.writableEnded, false)
+writable.end(
+ 'testing ended state',
+ common.mustCall(() => {
+ assert.strictEqual(writable._writableState.ended, true)
+ assert.strictEqual(writable._writableState.writable, undefined)
+ assert.strictEqual(writable.writable, false)
+ assert.strictEqual(writable.writableEnded, true)
+ })
+)
+assert.strictEqual(writable._writableState.ended, true)
+assert.strictEqual(writable._writableState.writable, undefined)
+assert.strictEqual(writable.writable, false)
+assert.strictEqual(writable.writableEnded, true)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-final-async.js b/test/parallel/test-stream-writable-final-async.js
new file mode 100644
index 0000000000..f4643db8ee
--- /dev/null
+++ b/test/parallel/test-stream-writable-final-async.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Duplex } = require('../../lib/ours/index')
+
+const st = require('timers').setTimeout
+
+function setTimeout(ms) {
+ return new Promise((resolve) => {
+ st(resolve, ms)
+ })
+}
+
+{
+ class Foo extends Duplex {
+ async _final(callback) {
+ await setTimeout(common.platformTimeout(1))
+ callback()
+ }
+
+ _read() {}
+ }
+
+ const foo = new Foo()
+ foo._write = common.mustCall((chunk, encoding, cb) => {
+ cb()
+ })
+ foo.end('test', common.mustCall())
+ foo.on('error', common.mustNotCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js
new file mode 100644
index 0000000000..bb5b529d4f
--- /dev/null
+++ b/test/parallel/test-stream-writable-final-destroy.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable } = require('../../lib/ours/index')
+
+{
+ const w = new Writable({
+ write(chunk, encoding, callback) {
+ callback(null)
+ },
+
+ final(callback) {
+ queueMicrotask(callback)
+ }
+ })
+ w.end()
+ w.destroy()
+ w.on('prefinish', common.mustNotCall())
+ w.on('finish', common.mustNotCall())
+ w.on('close', common.mustCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-final-throw.js b/test/parallel/test-stream-writable-final-throw.js
new file mode 100644
index 0000000000..1bd88cbbb9
--- /dev/null
+++ b/test/parallel/test-stream-writable-final-throw.js
@@ -0,0 +1,44 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Duplex } = require('../../lib/ours/index')
+
+{
+ class Foo extends Duplex {
+ _final(callback) {
+ throw new Error('fhqwhgads')
+ }
+
+ _read() {}
+ }
+
+ const foo = new Foo()
+ foo._write = common.mustCall((chunk, encoding, cb) => {
+ cb()
+ })
+ foo.end(
+ 'test',
+ common.expectsError({
+ message: 'fhqwhgads'
+ })
+ )
+ foo.on('error', common.mustCall())
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-finish-destroyed.js b/test/parallel/test-stream-writable-finish-destroyed.js
new file mode 100644
index 0000000000..e18a70f224
--- /dev/null
+++ b/test/parallel/test-stream-writable-finish-destroyed.js
@@ -0,0 +1,62 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable } = require('../../lib/ours/index')
+
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ cb()
+ })
+ )
+ })
+ })
+ w.on('finish', common.mustNotCall())
+ w.end('asd')
+ w.destroy()
+}
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ cb()
+ w.end()
+ })
+ )
+ })
+ })
+ w.on('finish', common.mustNotCall())
+ w.write('asd')
+ w.destroy()
+}
+{
+ const w = new Writable({
+ write() {}
+ })
+ w.on('finish', common.mustNotCall())
+ w.end()
+ w.destroy()
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js
index 641602a093..d4fefe06f1 100644
--- a/test/parallel/test-stream-writable-finished-state.js
+++ b/test/parallel/test-stream-writable-finished-state.js
@@ -1,44 +1,45 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var writable = new stream.Writable();
+const writable = new stream.Writable()
-writable._write = function (chunk, encoding, cb) {
+writable._write = (chunk, encoding, cb) => {
// The state finished should start in false.
- assert.strictEqual(writable._writableState.finished, false);
- cb();
-};
-
-writable.on('finish', common.mustCall(function () {
- assert.strictEqual(writable._writableState.finished, true);
-}));
-writable.end('testing finished state', common.mustCall(function () {
- assert.strictEqual(writable._writableState.finished, true);
-}));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(writable._writableState.finished, false)
+ cb()
+}
+
+writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(writable._writableState.finished, true)
+ })
+)
+writable.end(
+ 'testing finished state',
+ common.mustCall(() => {
+ assert.strictEqual(writable._writableState.finished, true)
+ })
+)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-finished.js b/test/parallel/test-stream-writable-finished.js
new file mode 100644
index 0000000000..8d31f4fea2
--- /dev/null
+++ b/test/parallel/test-stream-writable-finished.js
@@ -0,0 +1,116 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable } = require('../../lib/ours/index')
+
+const assert = require('assert') // basic
+
+{
+ // Find it on Writable.prototype
+ assert(Reflect.has(Writable.prototype, 'writableFinished'))
+} // event
+
+{
+ const writable = new Writable()
+
+ writable._write = (chunk, encoding, cb) => {
+ // The state finished should start in false.
+ assert.strictEqual(writable.writableFinished, false)
+ cb()
+ }
+
+ writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(writable.writableFinished, true)
+ })
+ )
+ writable.end(
+ 'testing finished state',
+ common.mustCall(() => {
+ assert.strictEqual(writable.writableFinished, true)
+ })
+ )
+}
+{
+ // Emit finish asynchronously.
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ }
+ })
+ w.end()
+ w.on('finish', common.mustCall())
+}
+{
+ // Emit prefinish synchronously.
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ }
+ })
+ let sync = true
+ w.on(
+ 'prefinish',
+ common.mustCall(() => {
+ assert.strictEqual(sync, true)
+ })
+ )
+ w.end()
+ sync = false
+}
+{
+ // Emit prefinish synchronously w/ final.
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ },
+
+ final(cb) {
+ cb()
+ }
+ })
+ let sync = true
+ w.on(
+ 'prefinish',
+ common.mustCall(() => {
+ assert.strictEqual(sync, true)
+ })
+ )
+ w.end()
+ sync = false
+}
+{
+ // Call _final synchronously.
+ let sync = true
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ },
+
+ final: common.mustCall((cb) => {
+ assert.strictEqual(sync, true)
+ cb()
+ })
+ })
+ w.end()
+ sync = false
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-invalid-chunk.js b/test/parallel/test-stream-writable-invalid-chunk.js
new file mode 100644
index 0000000000..4290581165
--- /dev/null
+++ b/test/parallel/test-stream-writable-invalid-chunk.js
@@ -0,0 +1,60 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+const assert = require('assert')
+
+function testWriteType(val, objectMode, code) {
+ const writable = new stream.Writable({
+ objectMode,
+ write: () => {}
+ })
+ writable.on('error', common.mustNotCall())
+
+ if (code) {
+ assert.throws(
+ () => {
+ writable.write(val)
+ },
+ {
+ code
+ }
+ )
+ } else {
+ writable.write(val)
+ }
+}
+
+testWriteType([], false, 'ERR_INVALID_ARG_TYPE')
+testWriteType({}, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(0, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(true, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(0.0, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(undefined, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(null, false, 'ERR_STREAM_NULL_VALUES')
+testWriteType([], true)
+testWriteType({}, true)
+testWriteType(0, true)
+testWriteType(true, true)
+testWriteType(0.0, true)
+testWriteType(undefined, true)
+testWriteType(null, true, 'ERR_STREAM_NULL_VALUES')
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-needdrain-state.js b/test/parallel/test-stream-writable-needdrain-state.js
index 0dc2b1152b..0876c3742c 100644
--- a/test/parallel/test-stream-writable-needdrain-state.js
+++ b/test/parallel/test-stream-writable-needdrain-state.js
@@ -1,45 +1,45 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
-var transform = new stream.Transform({
+const transform = new stream.Transform({
transform: _transform,
highWaterMark: 1
-});
+})
function _transform(chunk, encoding, cb) {
- assert.strictEqual(transform._writableState.needDrain, true);
- cb();
+ process.nextTick(() => {
+ assert.strictEqual(transform._writableState.needDrain, true)
+ cb()
+ })
}
-assert.strictEqual(transform._writableState.needDrain, false);
-transform.write('asdasd', common.mustCall(function () {
- assert.strictEqual(transform._writableState.needDrain, false);
-}));
-assert.strictEqual(transform._writableState.needDrain, true);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+assert.strictEqual(transform._writableState.needDrain, false)
+transform.write(
+ 'asdasd',
+ common.mustCall(() => {
+ assert.strictEqual(transform._writableState.needDrain, false)
+ })
+)
+assert.strictEqual(transform._writableState.needDrain, true)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js
index a6e8794a33..e60d895a52 100644
--- a/test/parallel/test-stream-writable-null.js
+++ b/test/parallel/test-stream-writable-null.js
@@ -1,120 +1,81 @@
-"use strict";
+'use strict'
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+const tap = require('tap')
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+const silentConsole = {
+ log() {},
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var assert = require('assert/');
-
-var stream = require('../../');
+ error() {}
+}
+const common = require('../common')
-var MyWritable =
-/*#__PURE__*/
-function (_stream$Writable) {
- _inherits(MyWritable, _stream$Writable);
+const assert = require('assert')
- function MyWritable() {
- _classCallCheck(this, MyWritable);
+const stream = require('../../lib/ours/index')
- return _possibleConstructorReturn(this, _getPrototypeOf(MyWritable).apply(this, arguments));
+class MyWritable extends stream.Writable {
+ constructor(options) {
+ super({
+ autoDestroy: false,
+ ...options
+ })
}
- _createClass(MyWritable, [{
- key: "_write",
- value: function _write(chunk, encoding, callback) {
- assert.notStrictEqual(chunk, null);
- callback();
- }
- }]);
-
- return MyWritable;
-}(stream.Writable);
+ _write(chunk, encoding, callback) {
+ assert.notStrictEqual(chunk, null)
+ callback()
+ }
+}
-common.expectsError(function () {
- var m = new MyWritable({
- objectMode: true
- });
- m.write(null, function (err) {
- return assert.ok(err);
- });
-}, {
- code: 'ERR_STREAM_NULL_VALUES',
- type: TypeError,
- message: 'May not write null values to stream'
-});
{
- // Should not throw.
- var m = new MyWritable({
+ const m = new MyWritable({
objectMode: true
- }).on('error', assert);
- m.write(null, assert);
+ })
+ m.on('error', common.mustNotCall())
+ assert.throws(
+ () => {
+ m.write(null)
+ },
+ {
+ code: 'ERR_STREAM_NULL_VALUES'
+ }
+ )
}
-common.expectsError(function () {
- var m = new MyWritable();
- m.write(false, function (err) {
- return assert.ok(err);
- });
-}, {
- code: 'ERR_INVALID_ARG_TYPE',
- type: TypeError
-});
{
- // Should not throw.
- var _m = new MyWritable().on('error', assert);
-
- _m.write(false, assert);
+ const m = new MyWritable()
+ m.on('error', common.mustNotCall())
+ assert.throws(
+ () => {
+ m.write(false)
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+ )
}
{
// Should not throw.
- var _m2 = new MyWritable({
+ const m = new MyWritable({
objectMode: true
- });
-
- _m2.write(false, assert.ifError);
+ })
+ m.write(false, assert.ifError)
}
{
// Should not throw.
- var _m3 = new MyWritable({
+ const m = new MyWritable({
objectMode: true
- }).on('error', function (e) {
- assert.ifError(e || new Error('should not get here'));
- });
-
- _m3.write(false, assert.ifError);
+ }).on('error', (e) => {
+ assert.ifError(e || new Error('should not get here'))
+ })
+ m.write(false, assert.ifError)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
+/* replacement start */
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-properties.js b/test/parallel/test-stream-writable-properties.js
new file mode 100644
index 0000000000..80ed31463e
--- /dev/null
+++ b/test/parallel/test-stream-writable-properties.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const { Writable } = require('../../lib/ours/index')
+
+{
+ const w = new Writable()
+ assert.strictEqual(w.writableCorked, 0)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 0)
+ w.cork()
+ assert.strictEqual(w.writableCorked, 1)
+ w.cork()
+ assert.strictEqual(w.writableCorked, 2)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 1)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 0)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 0)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-samecb-singletick.js b/test/parallel/test-stream-writable-samecb-singletick.js
new file mode 100644
index 0000000000..bb3cf482fd
--- /dev/null
+++ b/test/parallel/test-stream-writable-samecb-singletick.js
@@ -0,0 +1,57 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Console } = require('console')
+
+const { Writable } = require('../../lib/ours/index')
+
+const async_hooks = require('async_hooks') // Make sure that repeated calls to silentConsole.log(), and by extension
+// stream.write() for the underlying stream, allocate exactly 1 tick object.
+// At the time of writing, that is enough to ensure a flat memory profile
+// from repeated silentConsole.log() calls, rather than having callbacks pile up
+// over time, assuming that data can be written synchronously.
+// Refs: https://github.com/nodejs/node/issues/18013
+// Refs: https://github.com/nodejs/node/issues/18367
+
+const checkTickCreated = common.mustCall()
+const hook = async_hooks
+ .createHook({
+    init(id, type, triggerId, resource) {
+ if (type === 'TickObject') checkTickCreated()
+ }
+ })
+ .enable()
+const console = new Console(
+ new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ cb()
+ }, 100)
+ })
+)
+
+for (let i = 0; i < 100; i++) console.log(i)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ hook.disable()
+})
+/* replacement end */
+
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-writable.js b/test/parallel/test-stream-writable-writable.js
new file mode 100644
index 0000000000..d5918c242f
--- /dev/null
+++ b/test/parallel/test-stream-writable-writable.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Writable } = require('../../lib/ours/index')
+
+{
+ const w = new Writable({
+ write() {}
+ })
+ assert.strictEqual(w.writable, true)
+ w.destroy()
+ assert.strictEqual(w.writable, false)
+}
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ callback(new Error())
+ })
+ })
+ assert.strictEqual(w.writable, true)
+ w.write('asd')
+ assert.strictEqual(w.writable, false)
+ w.on('error', common.mustCall())
+}
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ process.nextTick(() => {
+ callback(new Error())
+ assert.strictEqual(w.writable, false)
+ })
+ })
+ })
+ w.write('asd')
+ w.on('error', common.mustCall())
+}
+{
+ const w = new Writable({
+ write: common.mustNotCall()
+ })
+ assert.strictEqual(w.writable, true)
+ w.end()
+ assert.strictEqual(w.writable, false)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-cb-error.js b/test/parallel/test-stream-writable-write-cb-error.js
new file mode 100644
index 0000000000..9d505f159d
--- /dev/null
+++ b/test/parallel/test-stream-writable-write-cb-error.js
@@ -0,0 +1,83 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable } = require('../../lib/ours/index')
+
+const assert = require('assert') // Ensure callback is always invoked before
+// error is emitted. Regardless if error was
+// sync or async.
+
+{
+ let callbackCalled = false // Sync Error
+
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb(new Error())
+ })
+ })
+ writable.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(callbackCalled, true)
+ })
+ )
+ writable.write(
+ 'hi',
+ common.mustCall(() => {
+ callbackCalled = true
+ })
+ )
+}
+{
+ let callbackCalled = false // Async Error
+
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ process.nextTick(cb, new Error())
+ })
+ })
+ writable.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(callbackCalled, true)
+ })
+ )
+ writable.write(
+ 'hi',
+ common.mustCall(() => {
+ callbackCalled = true
+ })
+ )
+}
+{
+ // Sync Error
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb(new Error())
+ })
+ })
+ writable.on('error', common.mustCall())
+ let cnt = 0 // Ensure we don't live lock on sync error
+
+ while (writable.write('a')) cnt++
+
+ assert.strictEqual(cnt, 0)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-cb-twice.js b/test/parallel/test-stream-writable-write-cb-twice.js
index 9f3fbc3ebc..b66f28153b 100644
--- a/test/parallel/test-stream-writable-write-cb-twice.js
+++ b/test/parallel/test-stream-writable-write-cb-twice.js
@@ -1,74 +1,78 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var _require = require('../../'),
- Writable = _require.Writable;
+const { Writable } = require('../../lib/ours/index')
{
// Sync + Sync
- var writable = new Writable({
- write: common.mustCall(function (buf, enc, cb) {
- cb();
- common.expectsError(cb, {
- code: 'ERR_MULTIPLE_CALLBACK',
- type: Error
- });
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb()
+ cb()
+ })
+ })
+ writable.write('hi')
+ writable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_MULTIPLE_CALLBACK',
+ name: 'Error'
})
- });
- writable.write('hi');
+ )
}
{
// Sync + Async
- var _writable = new Writable({
- write: common.mustCall(function (buf, enc, cb) {
- cb();
- process.nextTick(function () {
- common.expectsError(cb, {
- code: 'ERR_MULTIPLE_CALLBACK',
- type: Error
- });
- });
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb()
+ process.nextTick(() => {
+ cb()
+ })
})
- });
-
- _writable.write('hi');
+ })
+ writable.write('hi')
+ writable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_MULTIPLE_CALLBACK',
+ name: 'Error'
+ })
+ )
}
{
// Async + Async
- var _writable2 = new Writable({
- write: common.mustCall(function (buf, enc, cb) {
- process.nextTick(cb);
- process.nextTick(function () {
- common.expectsError(cb, {
- code: 'ERR_MULTIPLE_CALLBACK',
- type: Error
- });
- });
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ process.nextTick(cb)
+ process.nextTick(() => {
+ cb()
+ })
})
- });
-
- _writable2.write('hi');
+ })
+ writable.write('hi')
+ writable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_MULTIPLE_CALLBACK',
+ name: 'Error'
+ })
+ )
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-error.js b/test/parallel/test-stream-writable-write-error.js
new file mode 100644
index 0000000000..248a790969
--- /dev/null
+++ b/test/parallel/test-stream-writable-write-error.js
@@ -0,0 +1,103 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Writable } = require('../../lib/ours/index')
+
+function expectError(w, args, code, sync) {
+ if (sync) {
+ if (code) {
+ assert.throws(() => w.write(...args), {
+ code
+ })
+ } else {
+ w.write(...args)
+ }
+ } else {
+ let errorCalled = false
+ let ticked = false
+ w.write(
+ ...args,
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(errorCalled, false)
+ assert.strictEqual(err.code, code)
+ })
+ )
+ ticked = true
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ errorCalled = true
+ assert.strictEqual(err.code, code)
+ })
+ )
+ }
+}
+
+function test(autoDestroy) {
+ {
+ const w = new Writable({
+ autoDestroy,
+
+ _write() {}
+ })
+ w.end()
+ expectError(w, ['asd'], 'ERR_STREAM_WRITE_AFTER_END')
+ }
+ {
+ const w = new Writable({
+ autoDestroy,
+
+ _write() {}
+ })
+ w.destroy()
+ }
+ {
+ const w = new Writable({
+ autoDestroy,
+
+ _write() {}
+ })
+ expectError(w, [null], 'ERR_STREAM_NULL_VALUES', true)
+ }
+ {
+ const w = new Writable({
+ autoDestroy,
+
+ _write() {}
+ })
+ expectError(w, [{}], 'ERR_INVALID_ARG_TYPE', true)
+ }
+ {
+ const w = new Writable({
+ decodeStrings: false,
+ autoDestroy,
+
+ _write() {}
+ })
+ expectError(w, ['asd', 'noencoding'], 'ERR_UNKNOWN_ENCODING', true)
+ }
+}
+
+test(false)
+test(true)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-writev-finish.js b/test/parallel/test-stream-writable-write-writev-finish.js
index 135230fe10..d6cd55a394 100644
--- a/test/parallel/test-stream-writable-write-writev-finish.js
+++ b/test/parallel/test-stream-writable-write-writev-finish.js
@@ -1,209 +1,173 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../'); // ensure consistency between the finish event when using cork()
+const stream = require('../../lib/ours/index') // Ensure consistency between the finish event when using cork()
// and writev and when not using them
-
{
- var writable = new stream.Writable();
-
- writable._write = function (chunks, encoding, cb) {
- cb(new Error('write test error'));
- };
-
- var firstError = false;
- writable.on('finish', common.mustCall(function () {
- assert.strictEqual(firstError, true);
- }));
- writable.on('prefinish', common.mustCall());
- writable.on('error', common.mustCall(function (er) {
- assert.strictEqual(er.message, 'write test error');
- firstError = true;
- }));
- writable.end('test');
+ const writable = new stream.Writable()
+
+ writable._write = (chunks, encoding, cb) => {
+ cb(new Error('write test error'))
+ }
+
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'write test error')
+ })
+ )
+ writable.end('test')
}
{
- var _writable = new stream.Writable();
-
- _writable._write = function (chunks, encoding, cb) {
- setImmediate(cb, new Error('write test error'));
- };
-
- var _firstError = false;
-
- _writable.on('finish', common.mustCall(function () {
- assert.strictEqual(_firstError, true);
- }));
-
- _writable.on('prefinish', common.mustCall());
-
- _writable.on('error', common.mustCall(function (er) {
- assert.strictEqual(er.message, 'write test error');
- _firstError = true;
- }));
-
- _writable.end('test');
+ const writable = new stream.Writable()
+
+ writable._write = (chunks, encoding, cb) => {
+ setImmediate(cb, new Error('write test error'))
+ }
+
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'write test error')
+ })
+ )
+ writable.end('test')
}
{
- var _writable2 = new stream.Writable();
-
- _writable2._write = function (chunks, encoding, cb) {
- cb(new Error('write test error'));
- };
-
- _writable2._writev = function (chunks, cb) {
- cb(new Error('writev test error'));
- };
-
- var _firstError2 = false;
-
- _writable2.on('finish', common.mustCall(function () {
- assert.strictEqual(_firstError2, true);
- }));
-
- _writable2.on('prefinish', common.mustCall());
-
- _writable2.on('error', common.mustCall(function (er) {
- assert.strictEqual(er.message, 'writev test error');
- _firstError2 = true;
- }));
-
- _writable2.cork();
-
- _writable2.write('test');
-
+ const writable = new stream.Writable()
+
+ writable._write = (chunks, encoding, cb) => {
+ cb(new Error('write test error'))
+ }
+
+ writable._writev = (chunks, cb) => {
+ cb(new Error('writev test error'))
+ }
+
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'writev test error')
+ })
+ )
+ writable.cork()
+ writable.write('test')
setImmediate(function () {
- _writable2.end('test');
- });
+ writable.end('test')
+ })
}
{
- var _writable3 = new stream.Writable();
-
- _writable3._write = function (chunks, encoding, cb) {
- setImmediate(cb, new Error('write test error'));
- };
-
- _writable3._writev = function (chunks, cb) {
- setImmediate(cb, new Error('writev test error'));
- };
-
- var _firstError3 = false;
-
- _writable3.on('finish', common.mustCall(function () {
- assert.strictEqual(_firstError3, true);
- }));
-
- _writable3.on('prefinish', common.mustCall());
-
- _writable3.on('error', common.mustCall(function (er) {
- assert.strictEqual(er.message, 'writev test error');
- _firstError3 = true;
- }));
-
- _writable3.cork();
-
- _writable3.write('test');
-
+ const writable = new stream.Writable()
+
+ writable._write = (chunks, encoding, cb) => {
+ setImmediate(cb, new Error('write test error'))
+ }
+
+ writable._writev = (chunks, cb) => {
+ setImmediate(cb, new Error('writev test error'))
+ }
+
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'writev test error')
+ })
+ )
+ writable.cork()
+ writable.write('test')
setImmediate(function () {
- _writable3.end('test');
- });
+ writable.end('test')
+ })
} // Regression test for
// https://github.com/nodejs/node/issues/13812
{
- var rs = new stream.Readable();
- rs.push('ok');
- rs.push(null);
-
- rs._read = function () {};
-
- var ws = new stream.Writable();
- var _firstError4 = false;
- ws.on('finish', common.mustCall(function () {
- assert.strictEqual(_firstError4, true);
- }));
- ws.on('error', common.mustCall(function () {
- _firstError4 = true;
- }));
-
- ws._write = function (chunk, encoding, done) {
- setImmediate(done, new Error());
- };
-
- rs.pipe(ws);
-}
-{
- var _rs = new stream.Readable();
+ const rs = new stream.Readable()
+ rs.push('ok')
+ rs.push(null)
- _rs.push('ok');
+ rs._read = () => {}
- _rs.push(null);
+ const ws = new stream.Writable()
+ ws.on('finish', common.mustNotCall())
+ ws.on('error', common.mustCall())
- _rs._read = function () {};
+ ws._write = (chunk, encoding, done) => {
+ setImmediate(done, new Error())
+ }
- var _ws = new stream.Writable();
+ rs.pipe(ws)
+}
+{
+ const rs = new stream.Readable()
+ rs.push('ok')
+ rs.push(null)
- _ws.on('finish', common.mustNotCall());
+ rs._read = () => {}
- _ws.on('error', common.mustCall());
+ const ws = new stream.Writable()
+ ws.on('finish', common.mustNotCall())
+ ws.on('error', common.mustCall())
- _ws._write = function (chunk, encoding, done) {
- done(new Error());
- };
+ ws._write = (chunk, encoding, done) => {
+ done(new Error())
+ }
- _rs.pipe(_ws);
+ rs.pipe(ws)
}
{
- var w = new stream.Writable();
-
- w._write = function (chunk, encoding, cb) {
- process.nextTick(cb);
- };
-
- w.on('error', common.mustCall());
- w.on('prefinish', function () {
- w.write("shouldn't write in prefinish listener");
- });
- w.end();
+ const w = new stream.Writable()
+
+ w._write = (chunk, encoding, cb) => {
+ process.nextTick(cb)
+ }
+
+ w.on('error', common.mustCall())
+ w.on('finish', common.mustNotCall())
+ w.on('prefinish', () => {
+ w.write("shouldn't write in prefinish listener")
+ })
+ w.end()
}
{
- var _w = new stream.Writable();
-
- _w._write = function (chunk, encoding, cb) {
- process.nextTick(cb);
- };
+ const w = new stream.Writable()
- _w.on('error', common.mustCall());
+ w._write = (chunk, encoding, cb) => {
+ process.nextTick(cb)
+ }
- _w.on('finish', function () {
- _w.write("shouldn't write in finish listener");
- });
-
- _w.end();
+ w.on('error', common.mustCall())
+ w.on('finish', () => {
+ w.write("shouldn't write in finish listener")
+ })
+ w.end()
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writableState-ending.js b/test/parallel/test-stream-writableState-ending.js
index 49754b4385..6eaac1958c 100644
--- a/test/parallel/test-stream-writableState-ending.js
+++ b/test/parallel/test-stream-writableState-ending.js
@@ -1,57 +1,52 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var writable = new stream.Writable();
+const writable = new stream.Writable()
function testStates(ending, finished, ended) {
- assert.strictEqual(writable._writableState.ending, ending);
- assert.strictEqual(writable._writableState.finished, finished);
- assert.strictEqual(writable._writableState.ended, ended);
+ assert.strictEqual(writable._writableState.ending, ending)
+ assert.strictEqual(writable._writableState.finished, finished)
+ assert.strictEqual(writable._writableState.ended, ended)
}
-writable._write = function (chunk, encoding, cb) {
- // ending, finished, ended start in false.
- testStates(false, false, false);
- cb();
-};
-
-writable.on('finish', function () {
- // ending, finished, ended = true.
- testStates(true, true, true);
-});
-var result = writable.end('testing function end()', function () {
- // ending, finished, ended = true.
- testStates(true, true, true);
-}); // end returns the writable instance
-
-assert.strictEqual(result, writable); // ending, ended = true.
-// finished = false.
-
-testStates(true, false, true);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
+writable._write = (chunk, encoding, cb) => {
+ // Ending, finished, ended start in false.
+ testStates(false, false, false)
+ cb()
+}
-process.removeAllListeners('uncaughtException');
+writable.on('finish', () => {
+ // Ending, finished, ended = true.
+ testStates(true, true, true)
+})
+const result = writable.end('testing function end()', () => {
+ // Ending, finished, ended = true.
+ testStates(true, true, true)
+}) // End returns the writable instance
-_list.pop();
+assert.strictEqual(result, writable) // Ending, ended = true.
+// finished = false.
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+testStates(true, false, true)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js
index d3efeb7ab8..99b3a46c73 100644
--- a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js
+++ b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js
@@ -1,72 +1,66 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var writable = new stream.Writable();
-writable._writev = common.mustCall(function (chunks, cb) {
- assert.strictEqual(chunks.length, 2);
- cb();
-}, 1);
-writable._write = common.mustCall(function (chunk, encoding, cb) {
- cb();
-}, 1); // first cork
+const writable = new stream.Writable()
+writable._writev = common.mustCall((chunks, cb) => {
+ assert.strictEqual(chunks.length, 2)
+ cb()
+}, 1)
+writable._write = common.mustCall((chunk, encoding, cb) => {
+ cb()
+}, 1) // first cork
-writable.cork();
-assert.strictEqual(writable._writableState.corked, 1);
-assert.strictEqual(writable._writableState.bufferedRequestCount, 0); // cork again
+writable.cork()
+assert.strictEqual(writable._writableState.corked, 1)
+assert.strictEqual(writable._writableState.bufferedRequestCount, 0) // cork again
-writable.cork();
-assert.strictEqual(writable._writableState.corked, 2); // the first chunk is buffered
+writable.cork()
+assert.strictEqual(writable._writableState.corked, 2) // The first chunk is buffered
-writable.write('first chunk');
-assert.strictEqual(writable._writableState.bufferedRequestCount, 1); // first uncork does nothing
+writable.write('first chunk')
+assert.strictEqual(writable._writableState.bufferedRequestCount, 1) // First uncork does nothing
-writable.uncork();
-assert.strictEqual(writable._writableState.corked, 1);
-assert.strictEqual(writable._writableState.bufferedRequestCount, 1);
-process.nextTick(uncork); // the second chunk is buffered, because we uncork at the end of tick
+writable.uncork()
+assert.strictEqual(writable._writableState.corked, 1)
+assert.strictEqual(writable._writableState.bufferedRequestCount, 1)
+process.nextTick(uncork) // The second chunk is buffered, because we uncork at the end of tick
-writable.write('second chunk');
-assert.strictEqual(writable._writableState.corked, 1);
-assert.strictEqual(writable._writableState.bufferedRequestCount, 2);
+writable.write('second chunk')
+assert.strictEqual(writable._writableState.corked, 1)
+assert.strictEqual(writable._writableState.bufferedRequestCount, 2)
function uncork() {
- // second uncork flushes the buffer
- writable.uncork();
- assert.strictEqual(writable._writableState.corked, 0);
- assert.strictEqual(writable._writableState.bufferedRequestCount, 0); // verify that end() uncorks correctly
+ // Second uncork flushes the buffer
+ writable.uncork()
+ assert.strictEqual(writable._writableState.corked, 0)
+ assert.strictEqual(writable._writableState.bufferedRequestCount, 0) // Verify that end() uncorks correctly
- writable.cork();
- writable.write('third chunk');
- writable.end(); // end causes an uncork() as well
+ writable.cork()
+ writable.write('third chunk')
+ writable.end() // End causes an uncork() as well
- assert.strictEqual(writable._writableState.corked, 0);
- assert.strictEqual(writable._writableState.bufferedRequestCount, 0);
+ assert.strictEqual(writable._writableState.corked, 0)
+ assert.strictEqual(writable._writableState.bufferedRequestCount, 0)
}
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js
index 6528b2bacd..031e4bfdf6 100644
--- a/test/parallel/test-stream-write-destroy.js
+++ b/test/parallel/test-stream-write-destroy.js
@@ -1,101 +1,80 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var _require = require('../../'),
- Writable = _require.Writable; // Test interaction between calling .destroy() on a writable and pending
+const { Writable } = require('../../lib/ours/index') // Test interaction between calling .destroy() on a writable and pending
// writes.
-
-for (var _i = 0, _arr = [false, true]; _i < _arr.length; _i++) {
- var withPendingData = _arr[_i];
-
- var _loop = function _loop() {
- var useEnd = _arr2[_i2];
- var callbacks = [];
- var w = new Writable({
- write: function write(data, enc, cb) {
- callbacks.push(cb);
+for (const withPendingData of [false, true]) {
+ for (const useEnd of [false, true]) {
+ const callbacks = []
+ const w = new Writable({
+ write(data, enc, cb) {
+ callbacks.push(cb)
},
+
// Effectively disable the HWM to observe 'drain' events more easily.
highWaterMark: 1
- });
- var chunksWritten = 0;
- var drains = 0;
- var finished = false;
- w.on('drain', function () {
- return drains++;
- });
- w.on('finish', function () {
- return finished = true;
- });
- w.write('abc', function () {
- return chunksWritten++;
- });
- assert.strictEqual(chunksWritten, 0);
- assert.strictEqual(drains, 0);
- callbacks.shift()();
- assert.strictEqual(chunksWritten, 1);
- assert.strictEqual(drains, 1);
+ })
+ let chunksWritten = 0
+ let drains = 0
+ w.on('drain', () => drains++)
+
+ function onWrite(err) {
+ if (err) {
+ assert.strictEqual(w.destroyed, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED')
+ } else {
+ chunksWritten++
+ }
+ }
+
+ w.write('abc', onWrite)
+ assert.strictEqual(chunksWritten, 0)
+ assert.strictEqual(drains, 0)
+ callbacks.shift()()
+ assert.strictEqual(chunksWritten, 1)
+ assert.strictEqual(drains, 1)
if (withPendingData) {
// Test 2 cases: There either is or is not data still in the write queue.
// (The second write will never actually get executed either way.)
- w.write('def', function () {
- return chunksWritten++;
- });
+ w.write('def', onWrite)
}
if (useEnd) {
// Again, test 2 cases: Either we indicate that we want to end the
// writable or not.
- w.end('ghi', function () {
- return chunksWritten++;
- });
+ w.end('ghi', onWrite)
} else {
- w.write('ghi', function () {
- return chunksWritten++;
- });
+ w.write('ghi', onWrite)
}
- assert.strictEqual(chunksWritten, 1);
- w.destroy();
- assert.strictEqual(chunksWritten, 1);
- callbacks.shift()();
- assert.strictEqual(chunksWritten, 2);
- assert.strictEqual(callbacks.length, 0);
- assert.strictEqual(drains, 1); // When we used `.end()`, we see the 'finished' event if and only if
- // we actually finished processing the write queue.
-
- assert.strictEqual(finished, !withPendingData && useEnd);
- };
-
- for (var _i2 = 0, _arr2 = [false, true]; _i2 < _arr2.length; _i2++) {
- _loop();
+ assert.strictEqual(chunksWritten, 1)
+ w.destroy()
+ assert.strictEqual(chunksWritten, 1)
+ callbacks.shift()()
+ assert.strictEqual(chunksWritten, useEnd && !withPendingData ? 1 : 2)
+ assert.strictEqual(callbacks.length, 0)
+ assert.strictEqual(drains, 1)
}
}
+/* replacement start */
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-write-drain.js b/test/parallel/test-stream-write-drain.js
new file mode 100644
index 0000000000..dffbb18271
--- /dev/null
+++ b/test/parallel/test-stream-write-drain.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Writable } = require('../../lib/ours/index') // Don't emit 'drain' if ended
+
+const w = new Writable({
+ write(data, enc, cb) {
+ process.nextTick(cb)
+ },
+
+ highWaterMark: 1
+})
+w.on('drain', common.mustNotCall())
+w.write('asd')
+w.end()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-write-final.js b/test/parallel/test-stream-write-final.js
index 3f7ba25754..0b21343a9c 100644
--- a/test/parallel/test-stream-write-final.js
+++ b/test/parallel/test-stream-write-final.js
@@ -1,48 +1,46 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var shutdown = false;
-var w = new stream.Writable({
+let shutdown = false
+const w = new stream.Writable({
final: common.mustCall(function (cb) {
- assert.strictEqual(this, w);
+ assert.strictEqual(this, w)
setTimeout(function () {
- shutdown = true;
- cb();
- }, 100);
+ shutdown = true
+ cb()
+ }, 100)
}),
- write: function write(chunk, e, cb) {
- process.nextTick(cb);
+ write: function (chunk, e, cb) {
+ process.nextTick(cb)
}
-});
-w.on('finish', common.mustCall(function () {
- assert(shutdown);
-}));
-w.write(bufferShim.allocUnsafe(1));
-w.end(bufferShim.allocUnsafe(0));
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert(shutdown)
+ })
+)
+w.write(Buffer.allocUnsafe(1))
+w.end(Buffer.allocUnsafe(0))
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js
index ea0fcc8afa..17b91aa474 100644
--- a/test/parallel/test-stream-writev.js
+++ b/test/parallel/test-stream-writev.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,130 +18,148 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var queue = [];
+const queue = []
-for (var decode = 0; decode < 2; decode++) {
- for (var uncork = 0; uncork < 2; uncork++) {
- for (var multi = 0; multi < 2; multi++) {
- queue.push([!!decode, !!uncork, !!multi]);
+for (let decode = 0; decode < 2; decode++) {
+ for (let uncork = 0; uncork < 2; uncork++) {
+ for (let multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi])
}
}
}
-run();
+run()
function run() {
- var t = queue.pop();
- if (t) test(t[0], t[1], t[2], run);else require('tap').pass();
+ const t = queue.pop()
+ if (t) test(t[0], t[1], t[2], run)
+ else silentConsole.log('ok')
}
function test(decode, uncork, multi, next) {
- require('tap').test("# decode=".concat(decode, " uncork=").concat(uncork, " multi=").concat(multi));
-
- var counter = 0;
- var expectCount = 0;
+ silentConsole.log(`# decode=${decode} uncork=${uncork} multi=${multi}`)
+ let counter = 0
+ let expectCount = 0
function cnt(msg) {
- expectCount++;
- var expect = expectCount;
+ expectCount++
+ const expect = expectCount
return function (er) {
- assert.ifError(er);
- counter++;
- assert.strictEqual(counter, expect);
- };
+ assert.ifError(er)
+ counter++
+ assert.strictEqual(counter, expect)
+ }
}
- var w = new stream.Writable({
+ const w = new stream.Writable({
decodeStrings: decode
- });
- w._write = common.mustNotCall('Should not call _write');
- var expectChunks = decode ? [{
- encoding: 'buffer',
- chunk: [104, 101, 108, 108, 111, 44, 32]
- }, {
- encoding: 'buffer',
- chunk: [119, 111, 114, 108, 100]
- }, {
- encoding: 'buffer',
- chunk: [33]
- }, {
- encoding: 'buffer',
- chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46]
- }, {
- encoding: 'buffer',
- chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]
- }] : [{
- encoding: 'ascii',
- chunk: 'hello, '
- }, {
- encoding: 'utf8',
- chunk: 'world'
- }, {
- encoding: 'buffer',
- chunk: [33]
- }, {
- encoding: 'latin1',
- chunk: '\nand then...'
- }, {
- encoding: 'hex',
- chunk: 'facebea7deadbeefdecafbad'
- }];
- var actualChunks;
+ })
+ w._write = common.mustNotCall('Should not call _write')
+ const expectChunks = decode
+ ? [
+ {
+ encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]
+ }
+ ]
+ : [
+ {
+ encoding: 'ascii',
+ chunk: 'hello, '
+ },
+ {
+ encoding: 'utf8',
+ chunk: 'world'
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'latin1',
+ chunk: '\nand then...'
+ },
+ {
+ encoding: 'hex',
+ chunk: 'facebea7deadbeefdecafbad'
+ }
+ ]
+ let actualChunks
w._writev = function (chunks, cb) {
actualChunks = chunks.map(function (chunk) {
return {
encoding: chunk.encoding,
chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
- };
- });
- cb();
- };
-
- w.cork();
- w.write('hello, ', 'ascii', cnt('hello'));
- w.write('world', 'utf8', cnt('world'));
- if (multi) w.cork();
- w.write(bufferShim.from('!'), 'buffer', cnt('!'));
- w.write('\nand then...', 'latin1', cnt('and then'));
- if (multi) w.uncork();
- w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'));
- if (uncork) w.uncork();
- w.end(cnt('end'));
+ }
+ })
+ cb()
+ }
+
+ w.cork()
+ w.write('hello, ', 'ascii', cnt('hello'))
+ w.write('world', 'utf8', cnt('world'))
+ if (multi) w.cork()
+ w.write(Buffer.from('!'), 'buffer', cnt('!'))
+ w.write('\nand then...', 'latin1', cnt('and then'))
+ if (multi) w.uncork()
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'))
+ if (uncork) w.uncork()
+ w.end(cnt('end'))
w.on('finish', function () {
- // make sure finish comes after all the write cb
- cnt('finish')();
- assert.deepStrictEqual(actualChunks, expectChunks);
- next();
- });
+ // Make sure finish comes after all the write cb
+ cnt('finish')()
+ assert.deepStrictEqual(actualChunks, expectChunks)
+ next()
+ })
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ const w = new stream.Writable({
+ writev: common.mustCall(function (chunks, cb) {
+ cb()
+ })
+ })
+ w.write('asd', common.mustCall())
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js
index 5ee7b9d52e..afab641d37 100644
--- a/test/parallel/test-stream2-base64-single-char-read-end.js
+++ b/test/parallel/test-stream2-base64-single-char-read-end.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,64 +18,58 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-require('../common');
+const silentConsole = {
+ log() {},
-var R = require('../../lib/_stream_readable');
+ error() {}
+}
+require('../common')
-var W = require('../../lib/_stream_writable');
+const { Readable: R, Writable: W } = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
-var src = new R({
+const src = new R({
encoding: 'base64'
-});
-var dst = new W();
-var hasRead = false;
-var accum = [];
+})
+const dst = new W()
+let hasRead = false
+const accum = []
src._read = function (n) {
if (!hasRead) {
- hasRead = true;
+ hasRead = true
process.nextTick(function () {
- src.push(bufferShim.from('1'));
- src.push(null);
- });
+ src.push(Buffer.from('1'))
+ src.push(null)
+ })
}
-};
+}
dst._write = function (chunk, enc, cb) {
- accum.push(chunk);
- cb();
-};
+ accum.push(chunk)
+ cb()
+}
src.on('end', function () {
- assert.strictEqual(String(Buffer.concat(accum)), 'MQ==');
- clearTimeout(timeout);
-});
-src.pipe(dst);
-var timeout = setTimeout(function () {
- assert.fail('timed out waiting for _write');
-}, 100);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(String(Buffer.concat(accum)), 'MQ==')
+ clearTimeout(timeout)
+})
+src.pipe(dst)
+const timeout = setTimeout(function () {
+ assert.fail('timed out waiting for _write')
+}, 100)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js
index 7a16dc3fbf..81ef6080a7 100644
--- a/test/parallel/test-stream2-basic.js
+++ b/test/parallel/test-stream2-basic.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,407 +18,401 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-var common = require('../common');
+const silentConsole = {
+ log() {},
-var R = require('../../lib/_stream_readable');
-
-var assert = require('assert/');
-
-var EE = require('events').EventEmitter;
+ error() {}
+}
+const common = require('../common')
-var TestReader =
-/*#__PURE__*/
-function (_R) {
- _inherits(TestReader, _R);
+const { Readable: R, Writable: W } = require('../../lib/ours/index')
- function TestReader(n) {
- var _this;
+const assert = require('assert')
- _classCallCheck(this, TestReader);
+const EE = require('events').EventEmitter
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this));
- _this._buffer = bufferShim.alloc(n || 100, 'x');
- _this._pos = 0;
- _this._bufs = 10;
- return _this;
+class TestReader extends R {
+ constructor(n) {
+ super()
+ this._buffer = Buffer.alloc(n || 100, 'x')
+ this._pos = 0
+ this._bufs = 10
}
- _createClass(TestReader, [{
- key: "_read",
- value: function _read(n) {
- var _this2 = this;
-
- var max = this._buffer.length - this._pos;
- n = Math.max(n, 0);
- var toRead = Math.min(n, max);
-
- if (toRead === 0) {
- // simulate the read buffer filling up with some more bytes some time
- // in the future.
- setTimeout(function () {
- _this2._pos = 0;
- _this2._bufs -= 1;
-
- if (_this2._bufs <= 0) {
- // read them all!
- if (!_this2.ended) _this2.push(null);
- } else {
- // now we have more.
- // kinda cheating by calling _read, but whatever,
- // it's just fake anyway.
- _this2._read(n);
- }
- }, 10);
- return;
- }
-
- var ret = this._buffer.slice(this._pos, this._pos + toRead);
-
- this._pos += toRead;
- this.push(ret);
+ _read(n) {
+ const max = this._buffer.length - this._pos
+ n = Math.max(n, 0)
+ const toRead = Math.min(n, max)
+
+ if (toRead === 0) {
+ // Simulate the read buffer filling up with some more bytes some time
+ // in the future.
+ setTimeout(() => {
+ this._pos = 0
+ this._bufs -= 1
+
+ if (this._bufs <= 0) {
+ // read them all!
+ if (!this.ended) this.push(null)
+ } else {
+ // now we have more.
+ // kinda cheating by calling _read, but whatever,
+ // it's just fake anyway.
+ this._read(n)
+ }
+ }, 10)
+ return
}
- }]);
-
- return TestReader;
-}(R);
-
-var TestWriter =
-/*#__PURE__*/
-function (_EE) {
- _inherits(TestWriter, _EE);
- function TestWriter() {
- var _this3;
+ const ret = this._buffer.slice(this._pos, this._pos + toRead)
- _classCallCheck(this, TestWriter);
+ this._pos += toRead
+ this.push(ret)
+ }
+}
- _this3 = _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).call(this));
- _this3.received = [];
- _this3.flush = false;
- return _this3;
+class TestWriter extends EE {
+ constructor() {
+ super()
+ this.received = []
+ this.flush = false
}
- _createClass(TestWriter, [{
- key: "write",
- value: function write(c) {
- this.received.push(c.toString());
- this.emit('write', c);
- return true;
- }
- }, {
- key: "end",
- value: function end(c) {
- if (c) this.write(c);
- this.emit('end', this.received);
- }
- }]);
+ write(c) {
+ this.received.push(c.toString())
+ this.emit('write', c)
+ return true
+ }
- return TestWriter;
-}(EE);
+ end(c) {
+ if (c) this.write(c)
+ this.emit('end', this.received)
+ }
+}
{
// Test basic functionality
- var r = new TestReader(20);
- var reads = [];
- var expect = ['x', 'xx', 'xxx', 'xxxx', 'xxxxx', 'xxxxxxxxx', 'xxxxxxxxxx', 'xxxxxxxxxxxx', 'xxxxxxxxxxxxx', 'xxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxx'];
- r.on('end', common.mustCall(function () {
- assert.deepStrictEqual(reads, expect);
- }));
- var readSize = 1;
+ const r = new TestReader(20)
+ const reads = []
+ const expect = [
+ 'x',
+ 'xx',
+ 'xxx',
+ 'xxxx',
+ 'xxxxx',
+ 'xxxxxxxxx',
+ 'xxxxxxxxxx',
+ 'xxxxxxxxxxxx',
+ 'xxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxx'
+ ]
+ r.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(reads, expect)
+ })
+ )
+ let readSize = 1
function flow() {
- var res;
+ let res
while (null !== (res = r.read(readSize++))) {
- reads.push(res.toString());
+ reads.push(res.toString())
}
- r.once('readable', flow);
+ r.once('readable', flow)
}
- flow();
+ flow()
}
{
// Verify pipe
- var _r = new TestReader(5);
-
- var _expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'];
- var w = new TestWriter();
- w.on('end', common.mustCall(function (received) {
- assert.deepStrictEqual(received, _expect);
- }));
-
- _r.pipe(w);
+ const r = new TestReader(5)
+ const expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ const w = new TestWriter()
+ w.on(
+ 'end',
+ common.mustCall(function (received) {
+ assert.deepStrictEqual(received, expect)
+ })
+ )
+ r.pipe(w)
}
-forEach([1, 2, 3, 4, 5, 6, 7, 8, 9], function (SPLIT) {
+;[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function (SPLIT) {
// Verify unpipe
- var r = new TestReader(5); // unpipe after 3 writes, then write to another stream instead.
+ const r = new TestReader(5) // Unpipe after 3 writes, then write to another stream instead.
- var expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'];
- expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)];
- var w = [new TestWriter(), new TestWriter()];
- var writes = SPLIT;
+ let expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)]
+ const w = [new TestWriter(), new TestWriter()]
+ let writes = SPLIT
w[0].on('write', function () {
if (--writes === 0) {
- r.unpipe();
- assert.strictEqual(r._readableState.pipes, null);
- w[0].end();
- r.pipe(w[1]);
- assert.strictEqual(r._readableState.pipes, w[1]);
+ r.unpipe()
+ assert.deepStrictEqual(r._readableState.pipes, [])
+ w[0].end()
+ r.pipe(w[1])
+ assert.deepStrictEqual(r._readableState.pipes, [w[1]])
}
- });
- var ended = 0;
- w[0].on('end', common.mustCall(function (results) {
- ended++;
- assert.strictEqual(ended, 1);
- assert.deepStrictEqual(results, expect[0]);
- }));
- w[1].on('end', common.mustCall(function (results) {
- ended++;
- assert.strictEqual(ended, 2);
- assert.deepStrictEqual(results, expect[1]);
- }));
- r.pipe(w[0]);
-});
+ })
+ let ended = 0
+ w[0].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 1)
+ assert.deepStrictEqual(results, expect[0])
+ })
+ )
+ w[1].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 2)
+ assert.deepStrictEqual(results, expect[1])
+ })
+ )
+ r.pipe(w[0])
+})
{
// Verify both writers get the same data when piping to destinations
- var _r2 = new TestReader(5);
-
- var _w = [new TestWriter(), new TestWriter()];
- var _expect2 = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'];
-
- _w[0].on('end', common.mustCall(function (received) {
- assert.deepStrictEqual(received, _expect2);
- }));
-
- _w[1].on('end', common.mustCall(function (received) {
- assert.deepStrictEqual(received, _expect2);
- }));
-
- _r2.pipe(_w[0]);
-
- _r2.pipe(_w[1]);
+ const r = new TestReader(5)
+ const w = [new TestWriter(), new TestWriter()]
+ const expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ w[0].on(
+ 'end',
+ common.mustCall(function (received) {
+ assert.deepStrictEqual(received, expect)
+ })
+ )
+ w[1].on(
+ 'end',
+ common.mustCall(function (received) {
+ assert.deepStrictEqual(received, expect)
+ })
+ )
+ r.pipe(w[0])
+ r.pipe(w[1])
}
-forEach([1, 2, 3, 4, 5, 6, 7, 8, 9], function (SPLIT) {
+;[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function (SPLIT) {
// Verify multi-unpipe
- var r = new TestReader(5); // unpipe after 3 writes, then write to another stream instead.
+ const r = new TestReader(5) // Unpipe after 3 writes, then write to another stream instead.
- var expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'];
- expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)];
- var w = [new TestWriter(), new TestWriter(), new TestWriter()];
- var writes = SPLIT;
+ let expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)]
+ const w = [new TestWriter(), new TestWriter(), new TestWriter()]
+ let writes = SPLIT
w[0].on('write', function () {
if (--writes === 0) {
- r.unpipe();
- w[0].end();
- r.pipe(w[1]);
+ r.unpipe()
+ w[0].end()
+ r.pipe(w[1])
}
- });
- var ended = 0;
- w[0].on('end', common.mustCall(function (results) {
- ended++;
- assert.strictEqual(ended, 1);
- assert.deepStrictEqual(results, expect[0]);
- }));
- w[1].on('end', common.mustCall(function (results) {
- ended++;
- assert.strictEqual(ended, 2);
- assert.deepStrictEqual(results, expect[1]);
- }));
- r.pipe(w[0]);
- r.pipe(w[2]);
-});
+ })
+ let ended = 0
+ w[0].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 1)
+ assert.deepStrictEqual(results, expect[0])
+ })
+ )
+ w[1].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 2)
+ assert.deepStrictEqual(results, expect[1])
+ })
+ )
+ r.pipe(w[0])
+ r.pipe(w[2])
+})
{
// Verify that back pressure is respected
- var _r3 = new R({
+ const r = new R({
objectMode: true
- });
-
- _r3._read = common.mustNotCall();
- var counter = 0;
-
- _r3.push(['one']);
-
- _r3.push(['two']);
-
- _r3.push(['three']);
-
- _r3.push(['four']);
-
- _r3.push(null);
-
- var w1 = new R();
+ })
+ r._read = common.mustNotCall()
+ let counter = 0
+ r.push(['one'])
+ r.push(['two'])
+ r.push(['three'])
+ r.push(['four'])
+ r.push(null)
+ const w1 = new R()
w1.write = function (chunk) {
- assert.strictEqual(chunk[0], 'one');
- w1.emit('close');
+ assert.strictEqual(chunk[0], 'one')
+ w1.emit('close')
process.nextTick(function () {
- _r3.pipe(w2);
-
- _r3.pipe(w3);
- });
- };
-
- w1.end = common.mustNotCall();
-
- _r3.pipe(w1);
+ r.pipe(w2)
+ r.pipe(w3)
+ })
+ }
- var expected = ['two', 'two', 'three', 'three', 'four', 'four'];
- var w2 = new R();
+ w1.end = common.mustNotCall()
+ r.pipe(w1)
+ const expected = ['two', 'two', 'three', 'three', 'four', 'four']
+ const w2 = new R()
w2.write = function (chunk) {
- assert.strictEqual(chunk[0], expected.shift());
- assert.strictEqual(counter, 0);
- counter++;
+ assert.strictEqual(chunk[0], expected.shift())
+ assert.strictEqual(counter, 0)
+ counter++
if (chunk[0] === 'four') {
- return true;
+ return true
}
setTimeout(function () {
- counter--;
- w2.emit('drain');
- }, 10);
- return false;
- };
+ counter--
+ w2.emit('drain')
+ }, 10)
+ return false
+ }
- w2.end = common.mustCall();
- var w3 = new R();
+ w2.end = common.mustCall()
+ const w3 = new R()
w3.write = function (chunk) {
- assert.strictEqual(chunk[0], expected.shift());
- assert.strictEqual(counter, 1);
- counter++;
+ assert.strictEqual(chunk[0], expected.shift())
+ assert.strictEqual(counter, 1)
+ counter++
if (chunk[0] === 'four') {
- return true;
+ return true
}
setTimeout(function () {
- counter--;
- w3.emit('drain');
- }, 50);
- return false;
- };
+ counter--
+ w3.emit('drain')
+ }, 50)
+ return false
+ }
w3.end = common.mustCall(function () {
- assert.strictEqual(counter, 2);
- assert.strictEqual(expected.length, 0);
- });
+ assert.strictEqual(counter, 2)
+ assert.strictEqual(expected.length, 0)
+ })
}
{
// Verify read(0) behavior for ended streams
- var _r4 = new R();
-
- var written = false;
- var ended = false;
- _r4._read = common.mustNotCall();
-
- _r4.push(bufferShim.from('foo'));
-
- _r4.push(null);
-
- var v = _r4.read(0);
-
- assert.strictEqual(v, null);
-
- var _w2 = new R();
-
- _w2.write = function (buffer) {
- written = true;
- assert.strictEqual(ended, false);
- assert.strictEqual(buffer.toString(), 'foo');
- };
-
- _w2.end = common.mustCall(function () {
- ended = true;
- assert.strictEqual(written, true);
- });
+ const r = new R()
+ let written = false
+ let ended = false
+ r._read = common.mustNotCall()
+ r.push(Buffer.from('foo'))
+ r.push(null)
+ const v = r.read(0)
+ assert.strictEqual(v, null)
+ const w = new R()
+
+ w.write = function (buffer) {
+ written = true
+ assert.strictEqual(ended, false)
+ assert.strictEqual(buffer.toString(), 'foo')
+ }
- _r4.pipe(_w2);
+ w.end = common.mustCall(function () {
+ ended = true
+ assert.strictEqual(written, true)
+ })
+ r.pipe(w)
}
{
// Verify synchronous _read ending
- var _r5 = new R();
-
- var called = false;
+ const r = new R()
+ let called = false
- _r5._read = function (n) {
- _r5.push(null);
- };
+ r._read = function (n) {
+ r.push(null)
+ }
- _r5.once('end', function () {
+ r.once('end', function () {
// Verify that this is called before the next tick
- called = true;
- });
-
- _r5.read();
-
+ called = true
+ })
+ r.read()
process.nextTick(function () {
- assert.strictEqual(called, true);
- });
+ assert.strictEqual(called, true)
+ })
}
{
// Verify that adding readable listeners trigger data flow
- var _r6 = new R({
+ const r = new R({
highWaterMark: 5
- });
-
- var onReadable = false;
- var readCalled = 0;
-
- _r6._read = function (n) {
- if (readCalled++ === 2) _r6.push(null);else _r6.push(bufferShim.from('asdf'));
- };
+ })
+ let onReadable = false
+ let readCalled = 0
- _r6.on('readable', function () {
- onReadable = true;
-
- _r6.read();
- });
+ r._read = function (n) {
+ if (readCalled++ === 2) r.push(null)
+ else r.push(Buffer.from('asdf'))
+ }
- _r6.on('end', common.mustCall(function () {
- assert.strictEqual(readCalled, 3);
- assert.ok(onReadable);
- }));
+ r.on('readable', function () {
+ onReadable = true
+ r.read()
+ })
+ r.on(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(readCalled, 3)
+ assert.ok(onReadable)
+ })
+ )
}
{
// Verify that streams are chainable
- var _r7 = new R();
-
- _r7._read = common.mustCall();
-
- var r2 = _r7.setEncoding('utf8').pause().resume().pause();
-
- assert.strictEqual(_r7, r2);
+ const r = new R()
+ r._read = common.mustCall()
+ const r2 = r.setEncoding('utf8').pause().resume().pause()
+ assert.strictEqual(r, r2)
}
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
+{
+ // Verify readableEncoding property
+ assert(Reflect.has(R.prototype, 'readableEncoding'))
+ const r = new R({
+ encoding: 'utf8'
+ })
+ assert.strictEqual(r.readableEncoding, 'utf8')
}
+{
+ // Verify readableObjectMode property
+ assert(Reflect.has(R.prototype, 'readableObjectMode'))
+ const r = new R({
+ objectMode: true
+ })
+ assert.strictEqual(r.readableObjectMode, true)
+}
+{
+ // Verify writableObjectMode property
+ assert(Reflect.has(W.prototype, 'writableObjectMode'))
+ const w = new W({
+ objectMode: true
+ })
+ assert.strictEqual(w.writableObjectMode, true)
+}
+/* replacement start */
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js
index 04ab8eeced..c6bbfdb077 100644
--- a/test/parallel/test-stream2-compatibility.js
+++ b/test/parallel/test-stream2-compatibility.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,112 +18,70 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var R = require('../../lib/_stream_readable');
+const { Readable: R, Writable: W } = require('../../lib/ours/index')
-var W = require('../../lib/_stream_writable');
+const assert = require('assert')
-var assert = require('assert/');
+let ondataCalled = 0
-var ondataCalled = 0;
-
-var TestReader =
-/*#__PURE__*/
-function (_R) {
- _inherits(TestReader, _R);
-
- function TestReader() {
- var _this;
-
- _classCallCheck(this, TestReader);
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this));
- _this._buffer = bufferShim.alloc(100, 'x');
-
- _this.on('data', function () {
- ondataCalled++;
- });
-
- return _this;
+class TestReader extends R {
+ constructor() {
+ super()
+ this._buffer = Buffer.alloc(100, 'x')
+ this.on('data', () => {
+ ondataCalled++
+ })
}
- _createClass(TestReader, [{
- key: "_read",
- value: function _read(n) {
- this.push(this._buffer);
- this._buffer = bufferShim.alloc(0);
- }
- }]);
-
- return TestReader;
-}(R);
+ _read(n) {
+ this.push(this._buffer)
+ this._buffer = Buffer.alloc(0)
+ }
+}
-var reader = new TestReader();
+const reader = new TestReader()
setImmediate(function () {
- assert.strictEqual(ondataCalled, 1);
-
- require('tap').pass();
-
- reader.push(null);
-});
-
-var TestWriter =
-/*#__PURE__*/
-function (_W) {
- _inherits(TestWriter, _W);
-
- function TestWriter() {
- var _this2;
-
- _classCallCheck(this, TestWriter);
-
- _this2 = _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).call(this));
-
- _this2.write('foo');
-
- _this2.end();
-
- return _this2;
+ assert.strictEqual(ondataCalled, 1)
+ silentConsole.log('ok')
+ reader.push(null)
+})
+
+class TestWriter extends W {
+ constructor() {
+ super()
+ this.write('foo')
+ this.end()
}
- _createClass(TestWriter, [{
- key: "_write",
- value: function _write(chunk, enc, cb) {
- cb();
- }
- }]);
-
- return TestWriter;
-}(W);
+ _write(chunk, enc, cb) {
+ cb()
+ }
+}
-var writer = new TestWriter();
+const writer = new TestWriter()
process.on('exit', function () {
- assert.strictEqual(reader.readable, false);
- assert.strictEqual(writer.writable, false);
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(reader.readable, false)
+ assert.strictEqual(writer.writable, false)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js
index 600e0c6583..3616e3f7a4 100644
--- a/test/parallel/test-stream2-decode-partial.js
+++ b/test/parallel/test-stream2-decode-partial.js
@@ -1,48 +1,42 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var Readable = require('../../lib/_stream_readable');
+const { Readable } = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
-var buf = '';
-var euro = bufferShim.from([0xE2, 0x82, 0xAC]);
-var cent = bufferShim.from([0xC2, 0xA2]);
-var source = Buffer.concat([euro, cent]);
-var readable = Readable({
+let buf = ''
+const euro = Buffer.from([0xe2, 0x82, 0xac])
+const cent = Buffer.from([0xc2, 0xa2])
+const source = Buffer.concat([euro, cent])
+const readable = Readable({
encoding: 'utf8'
-});
-readable.push(source.slice(0, 2));
-readable.push(source.slice(2, 4));
-readable.push(source.slice(4, source.length));
-;
-readable.push(null);
+})
+readable.push(source.slice(0, 2))
+readable.push(source.slice(2, 4))
+readable.push(source.slice(4, 6))
+readable.push(null)
readable.on('data', function (data) {
- buf += data;
-});
+ buf += data
+})
process.on('exit', function () {
- assert.strictEqual(buf, '€¢');
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ assert.strictEqual(buf, '€¢')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-finish-pipe-error.js b/test/parallel/test-stream2-finish-pipe-error.js
new file mode 100644
index 0000000000..622f7151bc
--- /dev/null
+++ b/test/parallel/test-stream2-finish-pipe-error.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const stream = require('../../lib/ours/index')
+
+process.on('uncaughtException', common.mustCall())
+const r = new stream.Readable()
+
+r._read = function (size) {
+ r.push(Buffer.allocUnsafe(size))
+}
+
+const w = new stream.Writable()
+
+w._write = function (data, encoding, cb) {
+ cb(null)
+}
+
+r.pipe(w) // end() after pipe should cause unhandled exception
+
+w.end()
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js
index f20cdb0d24..afb5e5a899 100644
--- a/test/parallel/test-stream2-finish-pipe.js
+++ b/test/parallel/test-stream2-finish-pipe.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,47 +18,46 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var r = new stream.Readable();
+const r = new stream.Readable()
r._read = function (size) {
- r.push(bufferShim.allocUnsafe(size));
-};
+ r.push(Buffer.allocUnsafe(size))
+}
-var w = new stream.Writable();
+const w = new stream.Writable()
w._write = function (data, encoding, cb) {
- cb(null);
-};
-
-r.pipe(w); // This might sound unrealistic, but it happens in net.js. When
-// `socket.allowHalfOpen === false`, EOF will cause `.destroySoon()` call which
-// ends the writable side of net.Socket.
-
-w.end();
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ process.nextTick(cb, null)
+}
+
+r.pipe(w) // end() must be called in nextTick or a WRITE_AFTER_END error occurs.
+
+process.nextTick(() => {
+ // This might sound unrealistic, but it happens in net.js. When
+ // socket.allowHalfOpen === false, EOF will cause .destroySoon() call which
+ // ends the writable side of net.Socket.
+ w.end()
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-httpclient-response-end.js b/test/parallel/test-stream2-httpclient-response-end.js
new file mode 100644
index 0000000000..fbee5912d6
--- /dev/null
+++ b/test/parallel/test-stream2-httpclient-response-end.js
@@ -0,0 +1,62 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const http = require('http')
+
+const msg = 'Hello'
+const server = http
+ .createServer(function (req, res) {
+ res.writeHead(200, {
+ 'Content-Type': 'text/plain'
+ })
+ res.end(msg)
+ })
+ .listen(0, function () {
+ http.get(
+ {
+ port: this.address().port
+ },
+ function (res) {
+ let data = ''
+ res.on(
+ 'readable',
+ common.mustCall(function () {
+ silentConsole.log('readable event')
+ let chunk
+
+ while ((chunk = res.read()) !== null) {
+ data += chunk
+ }
+ })
+ )
+ res.on(
+ 'end',
+ common.mustCall(function () {
+ silentConsole.log('end event')
+ assert.strictEqual(msg, data)
+ server.close()
+ })
+ )
+ }
+ )
+ })
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js
index fe628142fa..e76dedaa73 100644
--- a/test/parallel/test-stream2-large-read-stall.js
+++ b/test/parallel/test-stream2-large-read-stall.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,79 +18,70 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/'); // If everything aligns so that you do a read(n) of exactly the
+const assert = require('assert') // If everything aligns so that you do a read(n) of exactly the
// remaining buffer, then make sure that 'end' still emits.
+const READSIZE = 100
+const PUSHSIZE = 20
+const PUSHCOUNT = 1000
+const HWM = 50
-var READSIZE = 100;
-var PUSHSIZE = 20;
-var PUSHCOUNT = 1000;
-var HWM = 50;
-
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
-var r = new Readable({
+const r = new Readable({
highWaterMark: HWM
-});
-var rs = r._readableState;
-r._read = push;
+})
+const rs = r._readableState
+r._read = push
r.on('readable', function () {
- ;
- false && console.error('>> readable');
- var ret;
+ silentConsole.error('>> readable')
+ let ret
do {
- ;
- false && console.error(" > read(".concat(READSIZE, ")"));
- ret = r.read(READSIZE);
- ;
- false && console.error(" < ".concat(ret && ret.length, " (").concat(rs.length, " remain)"));
- } while (ret && ret.length === READSIZE);
-
- ;
- false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length);
-});
-r.on('end', common.mustCall(function () {
- assert.strictEqual(pushes, PUSHCOUNT + 1);
-}));
-var pushes = 0;
+ silentConsole.error(` > read(${READSIZE})`)
+ ret = r.read(READSIZE)
+ silentConsole.error(` < ${ret && ret.length} (${rs.length} remain)`)
+ } while (ret && ret.length === READSIZE)
+
+ silentConsole.error('<< after read()', ret && ret.length, rs.needReadable, rs.length)
+})
+r.on(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(pushes, PUSHCOUNT + 1)
+ })
+)
+let pushes = 0
function push() {
- if (pushes > PUSHCOUNT) return;
+ if (pushes > PUSHCOUNT) return
if (pushes++ === PUSHCOUNT) {
- ;
- false && console.error(' push(EOF)');
- return r.push(null);
+ silentConsole.error(' push(EOF)')
+ return r.push(null)
}
- ;
- false && console.error(" push #".concat(pushes));
- if (r.push(bufferShim.allocUnsafe(PUSHSIZE))) setTimeout(push, 1);
+ silentConsole.error(` push #${pushes}`)
+ if (r.push(Buffer.allocUnsafe(PUSHSIZE))) setTimeout(push, 1)
}
+/* replacement start */
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js
index ec0c17d491..d6fcf14403 100644
--- a/test/parallel/test-stream2-objects.js
+++ b/test/parallel/test-stream2-objects.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,375 +18,371 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-var common = require('../common');
+const silentConsole = {
+ log() {},
-var Readable = require('../../lib/_stream_readable');
+ error() {}
+}
+const common = require('../common')
-var Writable = require('../../lib/_stream_writable');
+const { Readable, Writable } = require('../../lib/ours/index')
-var assert = require('assert/');
+const assert = require('assert')
function toArray(callback) {
- var stream = new Writable({
+ const stream = new Writable({
objectMode: true
- });
- var list = [];
+ })
+ const list = []
stream.write = function (chunk) {
- list.push(chunk);
- };
+ list.push(chunk)
+ }
stream.end = common.mustCall(function () {
- callback(list);
- });
- return stream;
+ callback(list)
+ })
+ return stream
}
function fromArray(list) {
- var r = new Readable({
+ const r = new Readable({
objectMode: true
- });
- r._read = common.mustNotCall();
- forEach(list, function (chunk) {
- r.push(chunk);
- });
- r.push(null);
- return r;
+ })
+ r._read = common.mustNotCall()
+ list.forEach(function (chunk) {
+ r.push(chunk)
+ })
+ r.push(null)
+ return r
}
{
// Verify that objects can be read from the stream
- var r = fromArray([{
- one: '1'
- }, {
- two: '2'
- }]);
- var v1 = r.read();
- var v2 = r.read();
- var v3 = r.read();
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const v1 = r.read()
+ const v2 = r.read()
+ const v3 = r.read()
assert.deepStrictEqual(v1, {
one: '1'
- });
+ })
assert.deepStrictEqual(v2, {
two: '2'
- });
- assert.deepStrictEqual(v3, null);
+ })
+ assert.strictEqual(v3, null)
}
{
// Verify that objects can be piped into the stream
- var _r = fromArray([{
- one: '1'
- }, {
- two: '2'
- }]);
-
- _r.pipe(toArray(common.mustCall(function (list) {
- assert.deepStrictEqual(list, [{
+ const r = fromArray([
+ {
one: '1'
- }, {
+ },
+ {
two: '2'
- }]);
- })));
+ }
+ ])
+ r.pipe(
+ toArray(
+ common.mustCall(function (list) {
+ assert.deepStrictEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ )
}
{
// Verify that read(n) is ignored
- var _r2 = fromArray([{
- one: '1'
- }, {
- two: '2'
- }]);
-
- var value = _r2.read(2);
-
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const value = r.read(2)
assert.deepStrictEqual(value, {
one: '1'
- });
+ })
}
{
// Verify that objects can be synchronously read
- var _r3 = new Readable({
+ const r = new Readable({
objectMode: true
- });
-
- var list = [{
- one: '1'
- }, {
- two: '2'
- }];
-
- _r3._read = function (n) {
- var item = list.shift();
-
- _r3.push(item || null);
- };
-
- _r3.pipe(toArray(common.mustCall(function (list) {
- assert.deepStrictEqual(list, [{
+ })
+ const list = [
+ {
one: '1'
- }, {
+ },
+ {
two: '2'
- }]);
- })));
+ }
+ ]
+
+ r._read = function (n) {
+ const item = list.shift()
+ r.push(item || null)
+ }
+
+ r.pipe(
+ toArray(
+ common.mustCall(function (list) {
+ assert.deepStrictEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ )
}
{
// Verify that objects can be asynchronously read
- var _r4 = new Readable({
+ const r = new Readable({
objectMode: true
- });
-
- var _list2 = [{
- one: '1'
- }, {
- two: '2'
- }];
-
- _r4._read = function (n) {
- var item = _list2.shift();
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+ r._read = function (n) {
+ const item = list.shift()
process.nextTick(function () {
- _r4.push(item || null);
- });
- };
+ r.push(item || null)
+ })
+ }
- _r4.pipe(toArray(common.mustCall(function (list) {
- assert.deepStrictEqual(list, [{
- one: '1'
- }, {
- two: '2'
- }]);
- })));
+ r.pipe(
+ toArray(
+ common.mustCall(function (list) {
+ assert.deepStrictEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ )
}
{
// Verify that strings can be read as objects
- var _r5 = new Readable({
+ const r = new Readable({
objectMode: true
- });
-
- _r5._read = common.mustNotCall();
- var _list3 = ['one', 'two', 'three'];
- forEach(_list3, function (str) {
- _r5.push(str);
- });
-
- _r5.push(null);
-
- _r5.pipe(toArray(common.mustCall(function (array) {
- assert.deepStrictEqual(array, _list3);
- })));
+ })
+ r._read = common.mustNotCall()
+ const list = ['one', 'two', 'three']
+ list.forEach(function (str) {
+ r.push(str)
+ })
+ r.push(null)
+ r.pipe(
+ toArray(
+ common.mustCall(function (array) {
+ assert.deepStrictEqual(array, list)
+ })
+ )
+ )
}
{
// Verify read(0) behavior for object streams
- var _r6 = new Readable({
+ const r = new Readable({
objectMode: true
- });
-
- _r6._read = common.mustNotCall();
-
- _r6.push('foobar');
-
- _r6.push(null);
-
- _r6.pipe(toArray(common.mustCall(function (array) {
- assert.deepStrictEqual(array, ['foobar']);
- })));
+ })
+ r._read = common.mustNotCall()
+ r.push('foobar')
+ r.push(null)
+ r.pipe(
+ toArray(
+ common.mustCall(function (array) {
+ assert.deepStrictEqual(array, ['foobar'])
+ })
+ )
+ )
}
{
// Verify the behavior of pushing falsey values
- var _r7 = new Readable({
+ const r = new Readable({
objectMode: true
- });
-
- _r7._read = common.mustNotCall();
-
- _r7.push(false);
-
- _r7.push(0);
-
- _r7.push('');
-
- _r7.push(null);
-
- _r7.pipe(toArray(common.mustCall(function (array) {
- assert.deepStrictEqual(array, [false, 0, '']);
- })));
+ })
+ r._read = common.mustNotCall()
+ r.push(false)
+ r.push(0)
+ r.push('')
+ r.push(null)
+ r.pipe(
+ toArray(
+ common.mustCall(function (array) {
+ assert.deepStrictEqual(array, [false, 0, ''])
+ })
+ )
+ )
}
{
// Verify high watermark _read() behavior
- var _r8 = new Readable({
+ const r = new Readable({
highWaterMark: 6,
objectMode: true
- });
-
- var calls = 0;
- var _list4 = ['1', '2', '3', '4', '5', '6', '7', '8'];
-
- _r8._read = function (n) {
- calls++;
- };
-
- forEach(_list4, function (c) {
- _r8.push(c);
- });
-
- var v = _r8.read();
+ })
+ let calls = 0
+ const list = ['1', '2', '3', '4', '5', '6', '7', '8']
- assert.strictEqual(calls, 0);
- assert.strictEqual(v, '1');
-
- var _v = _r8.read();
-
- assert.strictEqual(_v, '2');
-
- var _v2 = _r8.read();
+ r._read = function (n) {
+ calls++
+ }
- assert.strictEqual(_v2, '3');
- assert.strictEqual(calls, 1);
+ list.forEach(function (c) {
+ r.push(c)
+ })
+ const v = r.read()
+ assert.strictEqual(calls, 0)
+ assert.strictEqual(v, '1')
+ const v2 = r.read()
+ assert.strictEqual(v2, '2')
+ const v3 = r.read()
+ assert.strictEqual(v3, '3')
+ assert.strictEqual(calls, 1)
}
{
// Verify high watermark push behavior
- var _r9 = new Readable({
+ const r = new Readable({
highWaterMark: 6,
objectMode: true
- });
-
- _r9._read = common.mustNotCall();
-
- for (var i = 0; i < 6; i++) {
- var bool = _r9.push(i);
+ })
+ r._read = common.mustNotCall()
- assert.strictEqual(bool, i !== 5);
+ for (let i = 0; i < 6; i++) {
+ const bool = r.push(i)
+ assert.strictEqual(bool, i !== 5)
}
}
{
// Verify that objects can be written to stream
- var w = new Writable({
+ const w = new Writable({
objectMode: true
- });
+ })
w._write = function (chunk, encoding, cb) {
assert.deepStrictEqual(chunk, {
foo: 'bar'
- });
- cb();
- };
+ })
+ cb()
+ }
- w.on('finish', common.mustCall());
+ w.on('finish', common.mustCall())
w.write({
foo: 'bar'
- });
- w.end();
+ })
+ w.end()
}
{
// Verify that multiple objects can be written to stream
- var _w = new Writable({
+ const w = new Writable({
objectMode: true
- });
-
- var _list5 = [];
-
- _w._write = function (chunk, encoding, cb) {
- _list5.push(chunk);
-
- cb();
- };
-
- _w.on('finish', common.mustCall(function () {
- assert.deepStrictEqual(_list5, [0, 1, 2, 3, 4]);
- }));
-
- _w.write(0);
+ })
+ const list = []
- _w.write(1);
-
- _w.write(2);
-
- _w.write(3);
-
- _w.write(4);
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ cb()
+ }
- _w.end();
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.deepStrictEqual(list, [0, 1, 2, 3, 4])
+ })
+ )
+ w.write(0)
+ w.write(1)
+ w.write(2)
+ w.write(3)
+ w.write(4)
+ w.end()
}
{
// Verify that strings can be written as objects
- var _w2 = new Writable({
+ const w = new Writable({
objectMode: true
- });
+ })
+ const list = []
- var _list6 = [];
-
- _w2._write = function (chunk, encoding, cb) {
- _list6.push(chunk);
-
- process.nextTick(cb);
- };
-
- _w2.on('finish', common.mustCall(function () {
- assert.deepStrictEqual(_list6, ['0', '1', '2', '3', '4']);
- }));
-
- _w2.write('0');
-
- _w2.write('1');
-
- _w2.write('2');
-
- _w2.write('3');
-
- _w2.write('4');
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ process.nextTick(cb)
+ }
- _w2.end();
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.deepStrictEqual(list, ['0', '1', '2', '3', '4'])
+ })
+ )
+ w.write('0')
+ w.write('1')
+ w.write('2')
+ w.write('3')
+ w.write('4')
+ w.end()
}
{
// Verify that stream buffers finish until callback is called
- var _w3 = new Writable({
+ const w = new Writable({
objectMode: true
- });
-
- var called = false;
+ })
+ let called = false
- _w3._write = function (chunk, encoding, cb) {
- assert.strictEqual(chunk, 'foo');
+ w._write = function (chunk, encoding, cb) {
+ assert.strictEqual(chunk, 'foo')
process.nextTick(function () {
- called = true;
- cb();
- });
- };
-
- _w3.on('finish', common.mustCall(function () {
- assert.strictEqual(called, true);
- }));
-
- _w3.write('foo');
+ called = true
+ cb()
+ })
+ }
- _w3.end();
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(called, true)
+ })
+ )
+ w.write('foo')
+ w.end()
}
+/* replacement start */
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
}
-}
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js
index e621d48dc6..f6fc74204b 100644
--- a/test/parallel/test-stream2-pipe-error-handling.js
+++ b/test/parallel/test-stream2-pipe-error-handling.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,117 +18,109 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
{
- var count = 1000;
- var source = new stream.Readable();
+ let count = 1000
+ const source = new stream.Readable()
source._read = function (n) {
- n = Math.min(count, n);
- count -= n;
- source.push(bufferShim.allocUnsafe(n));
- };
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.allocUnsafe(n))
+ }
- var unpipedDest;
+ let unpipedDest
source.unpipe = function (dest) {
- unpipedDest = dest;
- stream.Readable.prototype.unpipe.call(this, dest);
- };
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
- var dest = new stream.Writable();
+ const dest = new stream.Writable()
dest._write = function (chunk, encoding, cb) {
- cb();
- };
+ cb()
+ }
- source.pipe(dest);
- var gotErr = null;
+ source.pipe(dest)
+ let gotErr = null
dest.on('error', function (err) {
- gotErr = err;
- });
- var unpipedSource;
+ gotErr = err
+ })
+ let unpipedSource
dest.on('unpipe', function (src) {
- unpipedSource = src;
- });
- var err = new Error('This stream turned into bacon.');
- dest.emit('error', err);
- assert.strictEqual(gotErr, err);
- assert.strictEqual(unpipedSource, source);
- assert.strictEqual(unpipedDest, dest);
+ unpipedSource = src
+ })
+ const err = new Error('This stream turned into bacon.')
+ dest.emit('error', err)
+ assert.strictEqual(gotErr, err)
+ assert.strictEqual(unpipedSource, source)
+ assert.strictEqual(unpipedDest, dest)
}
{
- var _count = 1000;
-
- var _source = new stream.Readable();
+ let count = 1000
+ const source = new stream.Readable()
- _source._read = function (n) {
- n = Math.min(_count, n);
- _count -= n;
-
- _source.push(bufferShim.allocUnsafe(n));
- };
-
- var _unpipedDest;
-
- _source.unpipe = function (dest) {
- _unpipedDest = dest;
- stream.Readable.prototype.unpipe.call(this, dest);
- };
-
- var _dest = new stream.Writable();
-
- _dest._write = function (chunk, encoding, cb) {
- cb();
- };
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.allocUnsafe(n))
+ }
- _source.pipe(_dest);
+ let unpipedDest
- var _unpipedSource;
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
- _dest.on('unpipe', function (src) {
- _unpipedSource = src;
- });
+ const dest = new stream.Writable({
+ autoDestroy: false
+ })
- var _err = new Error('This stream turned into bacon.');
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
- var _gotErr = null;
+ source.pipe(dest)
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+ const err = new Error('This stream turned into bacon.')
+ let gotErr = null
try {
- _dest.emit('error', _err);
+ dest.emit('error', err)
} catch (e) {
- _gotErr = e;
+ gotErr = e
}
- assert.strictEqual(_gotErr, _err);
- assert.strictEqual(_unpipedSource, _source);
- assert.strictEqual(_unpipedDest, _dest);
+ assert.strictEqual(gotErr, err)
+ assert.strictEqual(unpipedSource, source)
+ assert.strictEqual(unpipedDest, dest)
}
-;
+/* replacement start */
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js
index ad613095d1..0158b0544f 100644
--- a/test/parallel/test-stream2-pipe-error-once-listener.js
+++ b/test/parallel/test-stream2-pipe-error-once-listener.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,84 +18,50 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var Read =
-/*#__PURE__*/
-function (_stream$Readable) {
- _inherits(Read, _stream$Readable);
-
- function Read() {
- _classCallCheck(this, Read);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(Read).apply(this, arguments));
+class Read extends stream.Readable {
+ _read(size) {
+ this.push('x')
+ this.push(null)
}
+}
- _createClass(Read, [{
- key: "_read",
- value: function _read(size) {
- this.push('x');
- this.push(null);
- }
- }]);
-
- return Read;
-}(stream.Readable);
-
-var Write =
-/*#__PURE__*/
-function (_stream$Writable) {
- _inherits(Write, _stream$Writable);
-
- function Write() {
- _classCallCheck(this, Write);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(Write).apply(this, arguments));
+class Write extends stream.Writable {
+ _write(buffer, encoding, cb) {
+ this.emit('error', new Error('boom'))
+ this.emit('alldone')
}
+}
- _createClass(Write, [{
- key: "_write",
- value: function _write(buffer, encoding, cb) {
- this.emit('error', new Error('boom'));
- this.emit('alldone');
- }
- }]);
-
- return Write;
-}(stream.Writable);
-
-var read = new Read();
-var write = new Write();
-write.once('error', function () {});
+const read = new Read()
+const write = new Write()
+write.once('error', () => {})
write.once('alldone', function (err) {
- require('tap').pass();
-});
+ silentConsole.log('ok')
+})
process.on('exit', function (c) {
- console.error('error thrown even with listener');
-});
-read.pipe(write);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ silentConsole.error('error thrown even with listener')
+})
+read.pipe(write)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js
index b7b9083f0e..4c78fe445a 100644
--- a/test/parallel/test-stream2-push.js
+++ b/test/parallel/test-stream2-push.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,124 +18,123 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-require('../common');
+const silentConsole = {
+ log() {},
-var assert = require('assert/');
+ error() {}
+}
+require('../common')
-var _require = require('../../'),
- Readable = _require.Readable,
- Writable = _require.Writable;
+const assert = require('assert')
-var EE = require('events').EventEmitter; // a mock thing a bit like the net.Socket/tcp_wrap.handle interaction
+const { Readable, Writable } = require('../../lib/ours/index')
+const EE = require('events').EventEmitter // A mock thing a bit like the net.Socket/tcp_wrap.handle interaction
-var stream = new Readable({
+const stream = new Readable({
highWaterMark: 16,
encoding: 'utf8'
-});
-var source = new EE();
+})
+const source = new EE()
stream._read = function () {
- console.error('stream._read');
- readStart();
-};
+ silentConsole.error('stream._read')
+ readStart()
+}
-var ended = false;
+let ended = false
stream.on('end', function () {
- ended = true;
-});
+ ended = true
+})
source.on('data', function (chunk) {
- var ret = stream.push(chunk);
- console.error('data', stream.readableLength);
- if (!ret) readStop();
-});
+ const ret = stream.push(chunk)
+ silentConsole.error('data', stream.readableLength)
+ if (!ret) readStop()
+})
source.on('end', function () {
- stream.push(null);
-});
-var reading = false;
+ stream.push(null)
+})
+let reading = false
function readStart() {
- console.error('readStart');
- reading = true;
+ silentConsole.error('readStart')
+ reading = true
}
function readStop() {
- console.error('readStop');
- reading = false;
+ silentConsole.error('readStop')
+ reading = false
process.nextTick(function () {
- var r = stream.read();
- if (r !== null) writer.write(r);
- });
+ const r = stream.read()
+ if (r !== null) writer.write(r)
+ })
}
-var writer = new Writable({
+const writer = new Writable({
decodeStrings: false
-});
-var written = [];
-var expectWritten = ['asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg'];
+})
+const written = []
+const expectWritten = [
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg'
+]
writer._write = function (chunk, encoding, cb) {
- console.error("WRITE ".concat(chunk));
- written.push(chunk);
- process.nextTick(cb);
-};
+ silentConsole.error(`WRITE ${chunk}`)
+ written.push(chunk)
+ process.nextTick(cb)
+}
-writer.on('finish', finish); // now emit some chunks.
+writer.on('finish', finish) // Now emit some chunks.
-var chunk = 'asdfg';
-var set = 0;
-readStart();
-data();
+const chunk = 'asdfg'
+let set = 0
+readStart()
+data()
function data() {
- assert(reading);
- source.emit('data', chunk);
- assert(reading);
- source.emit('data', chunk);
- assert(reading);
- source.emit('data', chunk);
- assert(reading);
- source.emit('data', chunk);
- assert(!reading);
- if (set++ < 5) setTimeout(data, 10);else end();
+ assert(reading)
+ source.emit('data', chunk)
+ assert(reading)
+ source.emit('data', chunk)
+ assert(reading)
+ source.emit('data', chunk)
+ assert(reading)
+ source.emit('data', chunk)
+ assert(!reading)
+ if (set++ < 5) setTimeout(data, 10)
+ else end()
}
function finish() {
- console.error('finish');
- assert.deepStrictEqual(written, expectWritten);
-
- require('tap').pass();
+ silentConsole.error('finish')
+ assert.deepStrictEqual(written, expectWritten)
+ silentConsole.log('ok')
}
function end() {
- source.emit('end');
- assert(!reading);
- writer.end(stream.read());
+ source.emit('end')
+ assert(!reading)
+ writer.end(stream.read())
setImmediate(function () {
- assert(ended);
- });
+ assert(ended)
+ })
}
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js
index 7b94553928..099a4729b7 100644
--- a/test/parallel/test-stream2-read-sync-stack.js
+++ b/test/parallel/test-stream2-read-sync-stack.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,47 +18,42 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var Readable = require('../../').Readable; // This tests synchronous read callbacks and verifies that even if they nest
+const Readable = require('../../lib/ours/index').Readable // This tests synchronous read callbacks and verifies that even if they nest
// heavily the process handles it without an error
-
-var r = new Readable();
-var N = 256 * 1024;
-var reads = 0;
+const r = new Readable()
+const N = 256 * 1024
+let reads = 0
r._read = function (n) {
- var chunk = reads++ === N ? null : bufferShim.allocUnsafe(1);
- r.push(chunk);
-};
+ const chunk = reads++ === N ? null : Buffer.allocUnsafe(1)
+ r.push(chunk)
+}
r.on('readable', function onReadable() {
- if (!(r.readableLength % 256)) console.error('readable', r.readableLength);
- r.read(N * 2);
-});
-r.on('end', common.mustCall());
-r.read(0);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ if (!(r.readableLength % 256)) silentConsole.error('readable', r.readableLength)
+ r.read(N * 2)
+})
+r.on('end', common.mustCall())
+r.read(0)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
index d62979a07f..eda7613712 100644
--- a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
+++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,23 +18,26 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Readable = require('../../').Readable;
+const Readable = require('../../lib/ours/index').Readable
-test1();
-test2();
+test1()
+test2()
function test1() {
- var r = new Readable(); // should not end when we get a bufferShim.alloc(0) or '' as the _read
+ const r = new Readable() // Should not end when we get a Buffer.alloc(0) or '' as the _read
// result that just means that there is *temporarily* no data, but to
// go ahead and try again later.
//
@@ -46,114 +47,101 @@ function test1() {
// r.read(0) again later, otherwise there is no more work being done
// and the process just exits.
- var buf = bufferShim.alloc(5, 'x');
- var reads = 5;
+ const buf = Buffer.alloc(5, 'x')
+ let reads = 5
r._read = function (n) {
switch (reads--) {
case 5:
- return setImmediate(function () {
- return r.push(buf);
- });
+ return setImmediate(() => {
+ return r.push(buf)
+ })
case 4:
- setImmediate(function () {
- return r.push(bufferShim.alloc(0));
- });
- return setImmediate(r.read.bind(r, 0));
+ setImmediate(() => {
+ return r.push(Buffer.alloc(0))
+ })
+ return setImmediate(r.read.bind(r, 0))
case 3:
- setTimeout(r.read.bind(r, 0), 50);
- return process.nextTick(function () {
- return r.push(bufferShim.alloc(0));
- });
+ setImmediate(r.read.bind(r, 0))
+ return process.nextTick(() => {
+ return r.push(Buffer.alloc(0))
+ })
case 2:
- setImmediate(r.read.bind(r, 0));
- return r.push(bufferShim.alloc(0));
+ setImmediate(r.read.bind(r, 0))
+ return r.push(Buffer.alloc(0))
// Not-EOF!
case 1:
- return r.push(buf);
+ return r.push(buf)
case 0:
- return r.push(null);
+ return r.push(null)
// EOF
default:
- throw new Error('unreachable');
+ throw new Error('unreachable')
}
- };
+ }
- var results = [];
+ const results = []
function flow() {
- var chunk;
+ let chunk
- while (null !== (chunk = r.read())) {
- results.push(String(chunk));
- }
+ while (null !== (chunk = r.read())) results.push(String(chunk))
}
- r.on('readable', flow);
- r.on('end', function () {
- results.push('EOF');
- });
- flow();
- process.on('exit', function () {
- assert.deepStrictEqual(results, ['xxxxx', 'xxxxx', 'EOF']);
-
- require('tap').pass();
- });
+ r.on('readable', flow)
+ r.on('end', () => {
+ results.push('EOF')
+ })
+ flow()
+ process.on('exit', () => {
+ assert.deepStrictEqual(results, ['xxxxx', 'xxxxx', 'EOF'])
+ silentConsole.log('ok')
+ })
}
function test2() {
- var r = new Readable({
+ const r = new Readable({
encoding: 'base64'
- });
- var reads = 5;
+ })
+ let reads = 5
r._read = function (n) {
- if (!reads--) return r.push(null); // EOF
- else return r.push(bufferShim.from('x'));
- };
+ if (!reads--) return r.push(null) // EOF
+
+ return r.push(Buffer.from('x'))
+ }
- var results = [];
+ const results = []
function flow() {
- var chunk;
+ let chunk
- while (null !== (chunk = r.read())) {
- results.push(String(chunk));
- }
+ while (null !== (chunk = r.read())) results.push(String(chunk))
}
- r.on('readable', flow);
- r.on('end', function () {
- results.push('EOF');
- });
- flow();
- process.on('exit', function () {
- assert.deepStrictEqual(results, ['eHh4', 'eHg=', 'EOF']);
-
- require('tap').pass();
- });
+ r.on('readable', flow)
+ r.on('end', () => {
+ results.push('EOF')
+ })
+ flow()
+ process.on('exit', () => {
+ assert.deepStrictEqual(results, ['eHh4', 'eHg=', 'EOF'])
+ silentConsole.log('ok')
+ })
}
+/* replacement start */
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js
index 90376973d9..c883c05d29 100644
--- a/test/parallel/test-stream2-readable-from-list.js
+++ b/test/parallel/test-stream2-readable-from-list.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,116 +18,110 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+// Flags: --expose-internals
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var fromList = require('../../lib/_stream_readable')._fromList;
+const fromList = require('../../lib/ours/index').Readable._fromList
-var BufferList = require('../../lib/internal/streams/buffer_list');
+const BufferList = require('../../lib/internal/streams/buffer_list')
-var util = require('util');
+const util = require('util')
function bufferListFromArray(arr) {
- var bl = new BufferList();
+ const bl = new BufferList()
- for (var i = 0; i < arr.length; ++i) {
- bl.push(arr[i]);
- }
+ for (let i = 0; i < arr.length; ++i) bl.push(arr[i])
- return bl;
+ return bl
}
{
// Verify behavior with buffers
- var list = [bufferShim.from('foog'), bufferShim.from('bark'), bufferShim.from('bazy'), bufferShim.from('kuel')];
- list = bufferListFromArray(list);
- assert.strictEqual(util.inspect([list], {
- compact: false
- }).indexOf('BufferList') > 0, true); // read more than the first element.
+ let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')]
+ list = bufferListFromArray(list)
+ assert.strictEqual(typeof list.head, 'object')
+ assert.strictEqual(typeof list.tail, 'object')
+ assert.strictEqual(list.length, 4) // Read more than the first element.
- var ret = fromList(6, {
+ let ret = fromList(6, {
buffer: list,
length: 16
- });
- assert.strictEqual(ret.toString(), 'foogba'); // read exactly the first element.
+ })
+ assert.strictEqual(ret.toString(), 'foogba') // Read exactly the first element.
ret = fromList(2, {
buffer: list,
length: 10
- });
- assert.strictEqual(ret.toString(), 'rk'); // read less than the first element.
+ })
+ assert.strictEqual(ret.toString(), 'rk') // Read less than the first element.
ret = fromList(2, {
buffer: list,
length: 8
- });
- assert.strictEqual(ret.toString(), 'ba'); // read more than we have.
+ })
+ assert.strictEqual(ret.toString(), 'ba') // Read more than we have.
ret = fromList(100, {
buffer: list,
length: 6
- });
- assert.strictEqual(ret.toString(), 'zykuel'); // all consumed.
+ })
+ assert.strictEqual(ret.toString(), 'zykuel') // all consumed.
- assert.deepStrictEqual(list, new BufferList());
+ assert.deepStrictEqual(list, new BufferList())
}
{
// Verify behavior with strings
- var _list2 = ['foog', 'bark', 'bazy', 'kuel'];
- _list2 = bufferListFromArray(_list2); // read more than the first element.
+ let list = ['foog', 'bark', 'bazy', 'kuel']
+ list = bufferListFromArray(list) // Read more than the first element.
- var _ret = fromList(6, {
- buffer: _list2,
+ let ret = fromList(6, {
+ buffer: list,
length: 16,
decoder: true
- });
-
- assert.strictEqual(_ret, 'foogba'); // read exactly the first element.
+ })
+ assert.strictEqual(ret, 'foogba') // Read exactly the first element.
- _ret = fromList(2, {
- buffer: _list2,
+ ret = fromList(2, {
+ buffer: list,
length: 10,
decoder: true
- });
- assert.strictEqual(_ret, 'rk'); // read less than the first element.
+ })
+ assert.strictEqual(ret, 'rk') // Read less than the first element.
- _ret = fromList(2, {
- buffer: _list2,
+ ret = fromList(2, {
+ buffer: list,
length: 8,
decoder: true
- });
- assert.strictEqual(_ret, 'ba'); // read more than we have.
+ })
+ assert.strictEqual(ret, 'ba') // Read more than we have.
- _ret = fromList(100, {
- buffer: _list2,
+ ret = fromList(100, {
+ buffer: list,
length: 6,
decoder: true
- });
- assert.strictEqual(_ret, 'zykuel'); // all consumed.
+ })
+ assert.strictEqual(ret, 'zykuel') // all consumed.
- assert.deepStrictEqual(_list2, new BufferList());
+ assert.deepStrictEqual(list, new BufferList())
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
+/* replacement start */
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js
index 08cd9a57d8..a90325aa34 100644
--- a/test/parallel/test-stream2-readable-legacy-drain.js
+++ b/test/parallel/test-stream2-readable-legacy-drain.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,67 +18,56 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-var common = require('../common');
+const silentConsole = {
+ log() {},
-var assert = require('assert/');
+ error() {}
+}
+const common = require('../common')
-var Stream = require('../../');
+const assert = require('assert')
-var Readable = require('../../').Readable;
+const Stream = require('../../lib/ours/index')
-var r = new Readable();
-var N = 256;
-var reads = 0;
+const Readable = Stream.Readable
+const r = new Readable()
+const N = 256
+let reads = 0
r._read = function (n) {
- return r.push(++reads === N ? null : bufferShim.allocUnsafe(1));
-};
+ return r.push(++reads === N ? null : Buffer.allocUnsafe(1))
+}
-r.on('end', common.mustCall());
-var w = new Stream();
-w.writable = true;
-var buffered = 0;
+r.on('end', common.mustCall())
+const w = new Stream()
+w.writable = true
+let buffered = 0
w.write = function (c) {
- buffered += c.length;
- process.nextTick(drain);
- return false;
-};
+ buffered += c.length
+ process.nextTick(drain)
+ return false
+}
function drain() {
- assert(buffered <= 3);
- buffered = 0;
- w.emit('drain');
+ assert(buffered <= 3)
+ buffered = 0
+ w.emit('drain')
}
-w.end = common.mustCall(); // Just for kicks, let's mess with the drain count.
-// This verifies that even if it gets negative in the
-// pipe() cleanup function, we'll still function properly.
-
-r.on('readable', function () {
- w.emit('drain');
-});
-r.pipe(w);
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+w.end = common.mustCall()
+r.pipe(w)
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js
index 5b129fd4f6..bf4245d5fd 100644
--- a/test/parallel/test-stream2-readable-non-empty-end.js
+++ b/test/parallel/test-stream2-readable-non-empty-end.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,83 +18,78 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var Readable = require('../../lib/_stream_readable');
+const { Readable } = require('../../lib/ours/index')
-var len = 0;
-var chunks = new Array(10);
+let len = 0
+const chunks = new Array(10)
-for (var i = 1; i <= 10; i++) {
- chunks[i - 1] = bufferShim.allocUnsafe(i);
- len += i;
+for (let i = 1; i <= 10; i++) {
+ chunks[i - 1] = Buffer.allocUnsafe(i)
+ len += i
}
-var test = new Readable();
-var n = 0;
+const test = new Readable()
+let n = 0
test._read = function (size) {
- var chunk = chunks[n++];
+ const chunk = chunks[n++]
setTimeout(function () {
- test.push(chunk === undefined ? null : chunk);
- }, 1);
-};
+ test.push(chunk === undefined ? null : chunk)
+ }, 1)
+}
-test.on('end', thrower);
+test.on('end', thrower)
function thrower() {
- throw new Error('this should not happen!');
+ throw new Error('this should not happen!')
}
-var bytesread = 0;
+let bytesread = 0
test.on('readable', function () {
- var b = len - bytesread - 1;
- var res = test.read(b);
+ const b = len - bytesread - 1
+ const res = test.read(b)
if (res) {
- bytesread += res.length;
- console.error("br=".concat(bytesread, " len=").concat(len));
- setTimeout(next, 1);
+ bytesread += res.length
+ silentConsole.error(`br=${bytesread} len=${len}`)
+ setTimeout(next, 1)
}
- test.read(0);
-});
-test.read(0);
+ test.read(0)
+})
+test.read(0)
function next() {
- // now let's make 'end' happen
- test.removeListener('end', thrower);
- test.on('end', common.mustCall()); // one to get the last byte
-
- var r = test.read();
- assert(r);
- assert.strictEqual(r.length, 1);
- r = test.read();
- assert.strictEqual(r, null);
+ // Now let's make 'end' happen
+ test.removeListener('end', thrower)
+ test.on('end', common.mustCall()) // One to get the last byte
+
+ let r = test.read()
+ assert(r)
+ assert.strictEqual(r.length, 1)
+ r = test.read()
+ assert.strictEqual(r, null)
}
+/* replacement start */
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap-destroy.js b/test/parallel/test-stream2-readable-wrap-destroy.js
new file mode 100644
index 0000000000..b2c615b65d
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap-destroy.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const { Readable } = require('../../lib/ours/index')
+
+const EE = require('events').EventEmitter
+
+const oldStream = new EE()
+
+oldStream.pause = () => {}
+
+oldStream.resume = () => {}
+
+{
+ new Readable({
+ autoDestroy: false,
+ destroy: common.mustCall()
+ }).wrap(oldStream)
+ oldStream.emit('destroy')
+}
+{
+ new Readable({
+ autoDestroy: false,
+ destroy: common.mustCall()
+ }).wrap(oldStream)
+ oldStream.emit('close')
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js
index 99c4f4e1f9..3b23b719a4 100644
--- a/test/parallel/test-stream2-readable-wrap-empty.js
+++ b/test/parallel/test-stream2-readable-wrap-empty.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,41 +18,37 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var Readable = require('../../lib/_stream_readable');
-
-var EE = require('events').EventEmitter;
-
-var oldStream = new EE();
+const tap = require('tap')
-oldStream.pause = function () {};
+const silentConsole = {
+ log() {},
-oldStream.resume = function () {};
+ error() {}
+}
+const common = require('../common')
-var newStream = new Readable().wrap(oldStream);
-newStream.on('readable', function () {}).on('end', common.mustCall());
-oldStream.emit('end');
-;
+const { Readable } = require('../../lib/ours/index')
-(function () {
- var t = require('tap');
+const EE = require('events').EventEmitter
- t.pass('sync run');
-})();
+const oldStream = new EE()
-var _list = process.listeners('uncaughtException');
+oldStream.pause = () => {}
-process.removeAllListeners('uncaughtException');
+oldStream.resume = () => {}
-_list.pop();
+const newStream = new Readable().wrap(oldStream)
+newStream.on('readable', () => {}).on('end', common.mustCall())
+oldStream.emit('end')
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap-error.js b/test/parallel/test-stream2-readable-wrap-error.js
new file mode 100644
index 0000000000..80c4e37c0e
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap-error.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable } = require('../../lib/ours/index')
+
+const EE = require('events').EventEmitter
+
+class LegacyStream extends EE {
+ pause() {}
+
+ resume() {}
+}
+
+{
+ const err = new Error()
+ const oldStream = new LegacyStream()
+ const r = new Readable({
+ autoDestroy: true
+ })
+ .wrap(oldStream)
+ .on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.errorEmitted, true)
+ assert.strictEqual(r._readableState.errored, err)
+ assert.strictEqual(r.destroyed, true)
+ })
+ )
+ oldStream.emit('error', err)
+}
+{
+ const err = new Error()
+ const oldStream = new LegacyStream()
+ const r = new Readable({
+ autoDestroy: false
+ })
+ .wrap(oldStream)
+ .on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.errorEmitted, true)
+ assert.strictEqual(r._readableState.errored, err)
+ assert.strictEqual(r.destroyed, false)
+ })
+ )
+ oldStream.emit('error', err)
+}
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js
new file mode 100644
index 0000000000..b56629e39d
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap.js
@@ -0,0 +1,148 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
+
+const { Readable, Writable } = require('../../lib/ours/index')
+
+const EE = require('events').EventEmitter
+
+function runTest(highWaterMark, objectMode, produce) {
+ const old = new EE()
+ const r = new Readable({
+ highWaterMark,
+ objectMode
+ })
+ assert.strictEqual(r, r.wrap(old))
+ r.on('end', common.mustCall())
+
+ old.pause = function () {
+ old.emit('pause')
+ flowing = false
+ }
+
+ old.resume = function () {
+ old.emit('resume')
+ flow()
+ } // Make sure pause is only emitted once.
+
+ let pausing = false
+ r.on('pause', () => {
+ assert.strictEqual(pausing, false)
+ pausing = true
+ process.nextTick(() => {
+ pausing = false
+ })
+ })
+ let flowing
+ let chunks = 10
+ let oldEnded = false
+ const expected = []
+
+ function flow() {
+ flowing = true
+
+ while (flowing && chunks-- > 0) {
+ const item = produce()
+ expected.push(item)
+ old.emit('data', item)
+ }
+
+ if (chunks <= 0) {
+ oldEnded = true
+ old.emit('end')
+ }
+ }
+
+ const w = new Writable({
+ highWaterMark: highWaterMark * 2,
+ objectMode
+ })
+ const written = []
+
+ w._write = function (chunk, encoding, cb) {
+ written.push(chunk)
+ setTimeout(cb, 1)
+ }
+
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ performAsserts()
+ })
+ )
+ r.pipe(w)
+ flow()
+
+ function performAsserts() {
+ assert(oldEnded)
+ assert.deepStrictEqual(written, expected)
+ }
+}
+
+runTest(100, false, function () {
+ return Buffer.allocUnsafe(100)
+})
+runTest(10, false, function () {
+ return Buffer.from('xxxxxxxxxx')
+})
+runTest(1, true, function () {
+ return {
+ foo: 'bar'
+ }
+})
+const objectChunks = [
+ 5,
+ 'a',
+ false,
+ 0,
+ '',
+ 'xyz',
+ {
+ x: 4
+ },
+ 7,
+ [],
+ 555
+]
+runTest(1, true, function () {
+ return objectChunks.shift()
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js
index d1c7c8f693..59194e4bda 100644
--- a/test/parallel/test-stream2-set-encoding.js
+++ b/test/parallel/test-stream2-set-encoding.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,250 +18,339 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
-
-var assert = require('assert/');
-
-var R = require('../../lib/_stream_readable');
-
-var TestReader =
-/*#__PURE__*/
-function (_R) {
- _inherits(TestReader, _R);
+ error() {}
+}
+const common = require('../common')
- function TestReader(n, opts) {
- var _this;
+const assert = require('assert')
- _classCallCheck(this, TestReader);
+const { Readable: R } = require('../../lib/ours/index')
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this, opts));
- _this.pos = 0;
- _this.len = n || 100;
- return _this;
+class TestReader extends R {
+ constructor(n, opts) {
+ super(opts)
+ this.pos = 0
+ this.len = n || 100
}
- _createClass(TestReader, [{
- key: "_read",
- value: function _read(n) {
- var _this2 = this;
-
- setTimeout(function () {
- if (_this2.pos >= _this2.len) {
- // double push(null) to test eos handling
- _this2.push(null);
-
- return _this2.push(null);
- }
-
- n = Math.min(n, _this2.len - _this2.pos);
-
- if (n <= 0) {
- // double push(null) to test eos handling
- _this2.push(null);
-
- return _this2.push(null);
- }
-
- _this2.pos += n;
- var ret = bufferShim.alloc(n, 'a');
- return _this2.push(ret);
- }, 1);
- }
- }]);
-
- return TestReader;
-}(R);
+ _read(n) {
+ setTimeout(() => {
+ if (this.pos >= this.len) {
+ // Double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+
+ n = Math.min(n, this.len - this.pos)
+
+ if (n <= 0) {
+ // Double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+
+ this.pos += n
+ const ret = Buffer.alloc(n, 'a')
+ return this.push(ret)
+ }, 1)
+ }
+}
{
// Verify utf8 encoding
- var tr = new TestReader(100);
- tr.setEncoding('utf8');
- var out = [];
- var expect = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa'];
+ const tr = new TestReader(100)
+ tr.setEncoding('utf8')
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
tr.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = tr.read(10))) {
- out.push(chunk);
- }
- });
- tr.on('end', common.mustCall(function () {
- assert.deepStrictEqual(out, expect);
- }));
+ let chunk
+
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify hex encoding
- var _tr = new TestReader(100);
-
- _tr.setEncoding('hex');
-
- var _out = [];
- var _expect = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161'];
-
- _tr.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr.read(10))) {
- _out.push(chunk);
- }
- });
-
- _tr.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out, _expect);
- }));
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify hex encoding with read(13)
- var _tr2 = new TestReader(100);
-
- _tr2.setEncoding('hex');
-
- var _out2 = [];
- var _expect2 = ['6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161'];
-
- _tr2.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr2.read(13))) {
- _out2.push(chunk);
- }
- });
-
- _tr2.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out2, _expect2);
- }));
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(13))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify base64 encoding
- var _tr3 = new TestReader(100);
-
- _tr3.setEncoding('base64');
-
- var _out3 = [];
- var _expect3 = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ=='];
-
- _tr3.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr3.read(10))) {
- _out3.push(chunk);
- }
- });
-
- _tr3.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out3, _expect3);
- }));
+ const tr = new TestReader(100)
+ tr.setEncoding('base64')
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify utf8 encoding
- var _tr4 = new TestReader(100, {
+ const tr = new TestReader(100, {
encoding: 'utf8'
- });
-
- var _out4 = [];
- var _expect4 = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa'];
-
- _tr4.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr4.read(10))) {
- _out4.push(chunk);
- }
- });
-
- _tr4.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out4, _expect4);
- }));
+ })
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify hex encoding
- var _tr5 = new TestReader(100, {
+ const tr = new TestReader(100, {
encoding: 'hex'
- });
-
- var _out5 = [];
- var _expect5 = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161'];
-
- _tr5.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr5.read(10))) {
- _out5.push(chunk);
- }
- });
-
- _tr5.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out5, _expect5);
- }));
+ })
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify hex encoding with read(13)
- var _tr6 = new TestReader(100, {
+ const tr = new TestReader(100, {
encoding: 'hex'
- });
-
- var _out6 = [];
- var _expect6 = ['6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161'];
-
- _tr6.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr6.read(13))) {
- _out6.push(chunk);
- }
- });
-
- _tr6.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out6, _expect6);
- }));
+ })
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(13))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify base64 encoding
- var _tr7 = new TestReader(100, {
+ const tr = new TestReader(100, {
encoding: 'base64'
- });
-
- var _out7 = [];
- var _expect7 = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ=='];
-
- _tr7.on('readable', function flow() {
- var chunk;
-
- while (null !== (chunk = _tr7.read(10))) {
- _out7.push(chunk);
- }
- });
-
- _tr7.on('end', common.mustCall(function () {
- assert.deepStrictEqual(_out7, _expect7);
- }));
+ })
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
}
{
// Verify chaining behavior
- var _tr8 = new TestReader(100);
-
- assert.deepStrictEqual(_tr8.setEncoding('utf8'), _tr8);
+ const tr = new TestReader(100)
+ assert.deepStrictEqual(tr.setEncoding('utf8'), tr)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
+/* replacement start */
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js
index 9bf6158c49..ac91527cdd 100644
--- a/test/parallel/test-stream2-transform.js
+++ b/test/parallel/test-stream2-transform.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,557 +18,496 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
+const tap = require('tap')
-var common = require('../common');
+const silentConsole = {
+ log() {},
-var assert = require('assert/');
+ error() {}
+}
+const common = require('../common')
-var PassThrough = require('../../lib/_stream_passthrough');
+const assert = require('assert')
-var Transform = require('../../lib/_stream_transform');
+const { PassThrough, Transform } = require('../../lib/ours/index')
{
// Verify writable side consumption
- var tx = new Transform({
+ const tx = new Transform({
highWaterMark: 10
- });
- var transformed = 0;
+ })
+ let transformed = 0
tx._transform = function (chunk, encoding, cb) {
- transformed += chunk.length;
- tx.push(chunk);
- cb();
- };
+ transformed += chunk.length
+ tx.push(chunk)
+ cb()
+ }
- for (var i = 1; i <= 10; i++) {
- tx.write(bufferShim.allocUnsafe(i));
+ for (let i = 1; i <= 10; i++) {
+ tx.write(Buffer.allocUnsafe(i))
}
- tx.end();
- assert.strictEqual(tx.readableLength, 10);
- assert.strictEqual(transformed, 10);
- assert.strictEqual(tx._transformState.writechunk.length, 5);
- assert.deepStrictEqual(tx.writableBuffer.map(function (c) {
- return c.chunk.length;
- }), [6, 7, 8, 9, 10]);
+ tx.end()
+ assert.strictEqual(tx.readableLength, 10)
+ assert.strictEqual(transformed, 10)
+ assert.deepStrictEqual(
+ tx.writableBuffer.map(function (c) {
+ return c.chunk.length
+ }),
+ [5, 6, 7, 8, 9, 10]
+ )
}
{
// Verify passthrough behavior
- var pt = new PassThrough();
- pt.write(bufferShim.from('foog'));
- pt.write(bufferShim.from('bark'));
- pt.write(bufferShim.from('bazy'));
- pt.write(bufferShim.from('kuel'));
- pt.end();
- assert.strictEqual(pt.read(5).toString(), 'foogb');
- assert.strictEqual(pt.read(5).toString(), 'arkba');
- assert.strictEqual(pt.read(5).toString(), 'zykue');
- assert.strictEqual(pt.read(5).toString(), 'l');
+ const pt = new PassThrough()
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5).toString(), 'l')
}
{
// Verify object passthrough behavior
- var _pt = new PassThrough({
+ const pt = new PassThrough({
objectMode: true
- });
-
- _pt.write(1);
-
- _pt.write(true);
-
- _pt.write(false);
-
- _pt.write(0);
-
- _pt.write('foo');
-
- _pt.write('');
-
- _pt.write({
+ })
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
a: 'b'
- });
-
- _pt.end();
-
- assert.strictEqual(_pt.read(), 1);
- assert.strictEqual(_pt.read(), true);
- assert.strictEqual(_pt.read(), false);
- assert.strictEqual(_pt.read(), 0);
- assert.strictEqual(_pt.read(), 'foo');
- assert.strictEqual(_pt.read(), '');
- assert.deepStrictEqual(_pt.read(), {
+ })
+ pt.end()
+ assert.strictEqual(pt.read(), 1)
+ assert.strictEqual(pt.read(), true)
+ assert.strictEqual(pt.read(), false)
+ assert.strictEqual(pt.read(), 0)
+ assert.strictEqual(pt.read(), 'foo')
+ assert.strictEqual(pt.read(), '')
+ assert.deepStrictEqual(pt.read(), {
a: 'b'
- });
+ })
}
{
// Verify passthrough constructor behavior
- var _pt2 = PassThrough();
-
- assert(_pt2 instanceof PassThrough);
+ const pt = PassThrough()
+ assert(pt instanceof PassThrough)
}
{
// Verify transform constructor behavior
- var _pt3 = Transform();
-
- assert(_pt3 instanceof Transform);
+ const pt = Transform()
+ assert(pt instanceof Transform)
}
{
// Perform a simple transform
- var _pt4 = new Transform();
+ const pt = new Transform()
- _pt4._transform = function (c, e, cb) {
- var ret = bufferShim.alloc(c.length, 'x');
-
- _pt4.push(ret);
-
- cb();
- };
-
- _pt4.write(bufferShim.from('foog'));
-
- _pt4.write(bufferShim.from('bark'));
-
- _pt4.write(bufferShim.from('bazy'));
-
- _pt4.write(bufferShim.from('kuel'));
-
- _pt4.end();
+ pt._transform = function (c, e, cb) {
+ const ret = Buffer.alloc(c.length, 'x')
+ pt.push(ret)
+ cb()
+ }
- assert.strictEqual(_pt4.read(5).toString(), 'xxxxx');
- assert.strictEqual(_pt4.read(5).toString(), 'xxxxx');
- assert.strictEqual(_pt4.read(5).toString(), 'xxxxx');
- assert.strictEqual(_pt4.read(5).toString(), 'x');
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ assert.strictEqual(pt.read(5).toString(), 'xxxxx')
+ assert.strictEqual(pt.read(5).toString(), 'xxxxx')
+ assert.strictEqual(pt.read(5).toString(), 'xxxxx')
+ assert.strictEqual(pt.read(5).toString(), 'x')
}
{
// Verify simple object transform
- var _pt5 = new Transform({
+ const pt = new Transform({
objectMode: true
- });
-
- _pt5._transform = function (c, e, cb) {
- _pt5.push(JSON.stringify(c));
-
- cb();
- };
-
- _pt5.write(1);
-
- _pt5.write(true);
-
- _pt5.write(false);
-
- _pt5.write(0);
-
- _pt5.write('foo');
+ })
- _pt5.write('');
+ pt._transform = function (c, e, cb) {
+ pt.push(JSON.stringify(c))
+ cb()
+ }
- _pt5.write({
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
a: 'b'
- });
-
- _pt5.end();
-
- assert.strictEqual(_pt5.read(), '1');
- assert.strictEqual(_pt5.read(), 'true');
- assert.strictEqual(_pt5.read(), 'false');
- assert.strictEqual(_pt5.read(), '0');
- assert.strictEqual(_pt5.read(), '"foo"');
- assert.strictEqual(_pt5.read(), '""');
- assert.strictEqual(_pt5.read(), '{"a":"b"}');
+ })
+ pt.end()
+ assert.strictEqual(pt.read(), '1')
+ assert.strictEqual(pt.read(), 'true')
+ assert.strictEqual(pt.read(), 'false')
+ assert.strictEqual(pt.read(), '0')
+ assert.strictEqual(pt.read(), '"foo"')
+ assert.strictEqual(pt.read(), '""')
+ assert.strictEqual(pt.read(), '{"a":"b"}')
}
{
// Verify async passthrough
- var _pt6 = new Transform();
+ const pt = new Transform()
- _pt6._transform = function (chunk, encoding, cb) {
+ pt._transform = function (chunk, encoding, cb) {
setTimeout(function () {
- _pt6.push(chunk);
-
- cb();
- }, 10);
- };
-
- _pt6.write(bufferShim.from('foog'));
-
- _pt6.write(bufferShim.from('bark'));
-
- _pt6.write(bufferShim.from('bazy'));
-
- _pt6.write(bufferShim.from('kuel'));
-
- _pt6.end();
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }
- _pt6.on('finish', common.mustCall(function () {
- assert.strictEqual(_pt6.read(5).toString(), 'foogb');
- assert.strictEqual(_pt6.read(5).toString(), 'arkba');
- assert.strictEqual(_pt6.read(5).toString(), 'zykue');
- assert.strictEqual(_pt6.read(5).toString(), 'l');
- }));
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5).toString(), 'l')
+ })
+ )
}
{
// Verify asymmetric transform (expand)
- var _pt7 = new Transform(); // emit each chunk 2 times.
+ const pt = new Transform() // Emit each chunk 2 times.
-
- _pt7._transform = function (chunk, encoding, cb) {
+ pt._transform = function (chunk, encoding, cb) {
setTimeout(function () {
- _pt7.push(chunk);
-
+ pt.push(chunk)
setTimeout(function () {
- _pt7.push(chunk);
-
- cb();
- }, 10);
- }, 10);
- };
-
- _pt7.write(bufferShim.from('foog'));
-
- _pt7.write(bufferShim.from('bark'));
-
- _pt7.write(bufferShim.from('bazy'));
-
- _pt7.write(bufferShim.from('kuel'));
-
- _pt7.end();
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }, 10)
+ }
- _pt7.on('finish', common.mustCall(function () {
- assert.strictEqual(_pt7.read(5).toString(), 'foogf');
- assert.strictEqual(_pt7.read(5).toString(), 'oogba');
- assert.strictEqual(_pt7.read(5).toString(), 'rkbar');
- assert.strictEqual(_pt7.read(5).toString(), 'kbazy');
- assert.strictEqual(_pt7.read(5).toString(), 'bazyk');
- assert.strictEqual(_pt7.read(5).toString(), 'uelku');
- assert.strictEqual(_pt7.read(5).toString(), 'el');
- }));
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'foogf')
+ assert.strictEqual(pt.read(5).toString(), 'oogba')
+ assert.strictEqual(pt.read(5).toString(), 'rkbar')
+ assert.strictEqual(pt.read(5).toString(), 'kbazy')
+ assert.strictEqual(pt.read(5).toString(), 'bazyk')
+ assert.strictEqual(pt.read(5).toString(), 'uelku')
+ assert.strictEqual(pt.read(5).toString(), 'el')
+ })
+ )
}
{
// Verify asymmetric transform (compress)
- var _pt8 = new Transform(); // each output is the first char of 3 consecutive chunks,
+ const pt = new Transform() // Each output is the first char of 3 consecutive chunks,
// or whatever's left.
+ pt.state = ''
- _pt8.state = '';
-
- _pt8._transform = function (chunk, encoding, cb) {
- var _this = this;
-
- if (!chunk) chunk = '';
- var s = chunk.toString();
- setTimeout(function () {
- _this.state += s.charAt(0);
-
- if (_this.state.length === 3) {
- _pt8.push(bufferShim.from(_this.state));
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) chunk = ''
+ const s = chunk.toString()
+ setTimeout(() => {
+ this.state += s.charAt(0)
- _this.state = '';
+ if (this.state.length === 3) {
+ pt.push(Buffer.from(this.state))
+ this.state = ''
}
- cb();
- }, 10);
- };
-
- _pt8._flush = function (cb) {
- // just output whatever we have.
- _pt8.push(bufferShim.from(this.state));
-
- this.state = '';
- cb();
- };
-
- _pt8.write(bufferShim.from('aaaa'));
-
- _pt8.write(bufferShim.from('bbbb'));
-
- _pt8.write(bufferShim.from('cccc'));
-
- _pt8.write(bufferShim.from('dddd'));
-
- _pt8.write(bufferShim.from('eeee'));
-
- _pt8.write(bufferShim.from('aaaa'));
-
- _pt8.write(bufferShim.from('bbbb'));
-
- _pt8.write(bufferShim.from('cccc'));
-
- _pt8.write(bufferShim.from('dddd'));
-
- _pt8.write(bufferShim.from('eeee'));
-
- _pt8.write(bufferShim.from('aaaa'));
-
- _pt8.write(bufferShim.from('bbbb'));
-
- _pt8.write(bufferShim.from('cccc'));
-
- _pt8.write(bufferShim.from('dddd'));
-
- _pt8.end(); // 'abcdeabcdeabcd'
+ cb()
+ }, 10)
+ }
+ pt._flush = function (cb) {
+ // Just output whatever we have.
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ cb()
+ }
- _pt8.on('finish', common.mustCall(function () {
- assert.strictEqual(_pt8.read(5).toString(), 'abcde');
- assert.strictEqual(_pt8.read(5).toString(), 'abcde');
- assert.strictEqual(_pt8.read(5).toString(), 'abcd');
- }));
-} // this tests for a stall when data is written to a full stream
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.end() // 'abcdeabcdeabcd'
+
+ pt.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'abcde')
+ assert.strictEqual(pt.read(5).toString(), 'abcde')
+ assert.strictEqual(pt.read(5).toString(), 'abcd')
+ })
+ )
+} // This tests for a stall when data is written to a full stream
// that has empty transforms.
{
// Verify complex transform behavior
- var count = 0;
- var saved = null;
-
- var _pt9 = new Transform({
+ let count = 0
+ let saved = null
+ const pt = new Transform({
highWaterMark: 3
- });
+ })
- _pt9._transform = function (c, e, cb) {
- if (count++ === 1) saved = c;else {
+ pt._transform = function (c, e, cb) {
+ if (count++ === 1) saved = c
+ else {
if (saved) {
- _pt9.push(saved);
-
- saved = null;
+ pt.push(saved)
+ saved = null
}
- _pt9.push(c);
+ pt.push(c)
}
- cb();
- };
+ cb()
+ }
- _pt9.once('readable', function () {
+ pt.once('readable', function () {
process.nextTick(function () {
- _pt9.write(bufferShim.from('d'));
-
- _pt9.write(bufferShim.from('ef'), common.mustCall(function () {
- _pt9.end();
- }));
-
- assert.strictEqual(_pt9.read().toString(), 'abcdef');
- assert.strictEqual(_pt9.read(), null);
- });
- });
-
- _pt9.write(bufferShim.from('abc'));
+ pt.write(Buffer.from('d'))
+ pt.write(
+ Buffer.from('ef'),
+ common.mustCall(function () {
+ pt.end()
+ })
+ )
+ assert.strictEqual(pt.read().toString(), 'abcdef')
+ assert.strictEqual(pt.read(), null)
+ })
+ })
+ pt.write(Buffer.from('abc'))
}
{
// Verify passthrough event emission
- var _pt10 = new PassThrough();
-
- var emits = 0;
-
- _pt10.on('readable', function () {
- emits++;
- });
-
- _pt10.write(bufferShim.from('foog'));
-
- _pt10.write(bufferShim.from('bark'));
-
- assert.strictEqual(emits, 0);
- assert.strictEqual(_pt10.read(5).toString(), 'foogb');
- assert.strictEqual(String(_pt10.read(5)), 'null');
- assert.strictEqual(emits, 0);
-
- _pt10.write(bufferShim.from('bazy'));
-
- _pt10.write(bufferShim.from('kuel'));
-
- assert.strictEqual(emits, 0);
- assert.strictEqual(_pt10.read(5).toString(), 'arkba');
- assert.strictEqual(_pt10.read(5).toString(), 'zykue');
- assert.strictEqual(_pt10.read(5), null);
-
- _pt10.end();
-
- assert.strictEqual(emits, 1);
- assert.strictEqual(_pt10.read(5).toString(), 'l');
- assert.strictEqual(_pt10.read(5), null);
- assert.strictEqual(emits, 1);
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ emits++
+ })
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ assert.strictEqual(emits, 0)
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(String(pt.read(5)), 'null')
+ assert.strictEqual(emits, 0)
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ assert.strictEqual(emits, 0)
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5), null)
+ pt.end()
+ assert.strictEqual(emits, 1)
+ assert.strictEqual(pt.read(5).toString(), 'l')
+ assert.strictEqual(pt.read(5), null)
+ assert.strictEqual(emits, 1)
}
{
// Verify passthrough event emission reordering
- var _pt11 = new PassThrough();
-
- var _emits = 0;
-
- _pt11.on('readable', function () {
- _emits++;
- });
-
- _pt11.write(bufferShim.from('foog'));
-
- _pt11.write(bufferShim.from('bark'));
-
- assert.strictEqual(_emits, 0);
- assert.strictEqual(_pt11.read(5).toString(), 'foogb');
- assert.strictEqual(_pt11.read(5), null);
-
- _pt11.once('readable', common.mustCall(function () {
- assert.strictEqual(_pt11.read(5).toString(), 'arkba');
- assert.strictEqual(_pt11.read(5), null);
-
- _pt11.once('readable', common.mustCall(function () {
- assert.strictEqual(_pt11.read(5).toString(), 'zykue');
- assert.strictEqual(_pt11.read(5), null);
-
- _pt11.once('readable', common.mustCall(function () {
- assert.strictEqual(_pt11.read(5).toString(), 'l');
- assert.strictEqual(_pt11.read(5), null);
- assert.strictEqual(_emits, 3);
- }));
-
- _pt11.end();
- }));
-
- _pt11.write(bufferShim.from('kuel'));
- }));
-
- _pt11.write(bufferShim.from('bazy'));
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ emits++
+ })
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ assert.strictEqual(emits, 0)
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(pt.read(5), null)
+ pt.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5), null)
+ pt.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5), null)
+ pt.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'l')
+ assert.strictEqual(pt.read(5), null)
+ assert.strictEqual(emits, 3)
+ })
+ )
+ pt.end()
+ })
+ )
+ pt.write(Buffer.from('kuel'))
+ })
+ )
+ pt.write(Buffer.from('bazy'))
}
{
// Verify passthrough facade
- var _pt12 = new PassThrough();
-
- var datas = [];
-
- _pt12.on('data', function (chunk) {
- datas.push(chunk.toString());
- });
-
- _pt12.on('end', common.mustCall(function () {
- assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel']);
- }));
-
- _pt12.write(bufferShim.from('foog'));
-
+ const pt = new PassThrough()
+ const datas = []
+ pt.on('data', function (chunk) {
+ datas.push(chunk.toString())
+ })
+ pt.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel'])
+ })
+ )
+ pt.write(Buffer.from('foog'))
setTimeout(function () {
- _pt12.write(bufferShim.from('bark'));
-
+ pt.write(Buffer.from('bark'))
setTimeout(function () {
- _pt12.write(bufferShim.from('bazy'));
-
+ pt.write(Buffer.from('bazy'))
setTimeout(function () {
- _pt12.write(bufferShim.from('kuel'));
-
+ pt.write(Buffer.from('kuel'))
setTimeout(function () {
- _pt12.end();
- }, 10);
- }, 10);
- }, 10);
- }, 10);
+ pt.end()
+ }, 10)
+ }, 10)
+ }, 10)
+ }, 10)
}
{
// Verify object transform (JSON parse)
- var jp = new Transform({
+ const jp = new Transform({
objectMode: true
- });
+ })
jp._transform = function (data, encoding, cb) {
try {
- jp.push(JSON.parse(data));
- cb();
+ jp.push(JSON.parse(data))
+ cb()
} catch (er) {
- cb(er);
+ cb(er)
}
- }; // anything except null/undefined is fine.
+ } // Anything except null/undefined is fine.
// those are "magic" in the stream API, because they signal EOF.
-
- var objects = [{
- foo: 'bar'
- }, 100, 'string', {
- nested: {
- things: [{
- foo: 'bar'
- }, 100, 'string']
+ const objects = [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string',
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
}
- }];
- var ended = false;
+ ]
+ let ended = false
jp.on('end', function () {
- ended = true;
- });
- forEach(objects, function (obj) {
- jp.write(JSON.stringify(obj));
- var res = jp.read();
- assert.deepStrictEqual(res, obj);
- });
- jp.end(); // read one more time to get the 'end' event
-
- jp.read();
- process.nextTick(common.mustCall(function () {
- assert.strictEqual(ended, true);
- }));
+ ended = true
+ })
+ objects.forEach(function (obj) {
+ jp.write(JSON.stringify(obj))
+ const res = jp.read()
+ assert.deepStrictEqual(res, obj)
+ })
+ jp.end() // Read one more time to get the 'end' event
+
+ jp.read()
+ process.nextTick(
+ common.mustCall(function () {
+ assert.strictEqual(ended, true)
+ })
+ )
}
{
// Verify object transform (JSON stringify)
- var js = new Transform({
+ const js = new Transform({
objectMode: true
- });
+ })
js._transform = function (data, encoding, cb) {
try {
- js.push(JSON.stringify(data));
- cb();
+ js.push(JSON.stringify(data))
+ cb()
} catch (er) {
- cb(er);
+ cb(er)
}
- }; // anything except null/undefined is fine.
+ } // Anything except null/undefined is fine.
// those are "magic" in the stream API, because they signal EOF.
-
- var _objects = [{
- foo: 'bar'
- }, 100, 'string', {
- nested: {
- things: [{
- foo: 'bar'
- }, 100, 'string']
+ const objects = [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string',
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
}
- }];
- var _ended = false;
+ ]
+ let ended = false
js.on('end', function () {
- _ended = true;
- });
- forEach(_objects, function (obj) {
- js.write(obj);
- var res = js.read();
- assert.strictEqual(res, JSON.stringify(obj));
- });
- js.end(); // read one more time to get the 'end' event
-
- js.read();
- process.nextTick(common.mustCall(function () {
- assert.strictEqual(_ended, true);
- }));
+ ended = true
+ })
+ objects.forEach(function (obj) {
+ js.write(obj)
+ const res = js.read()
+ assert.strictEqual(res, JSON.stringify(obj))
+ })
+ js.end() // Read one more time to get the 'end' event
+
+ js.read()
+ process.nextTick(
+ common.mustCall(function () {
+ assert.strictEqual(ended, true)
+ })
+ )
}
+/* replacement start */
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
}
-}
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js
index a2e8166cf6..1d2c64e3a5 100644
--- a/test/parallel/test-stream2-unpipe-drain.js
+++ b/test/parallel/test-stream2-unpipe-drain.js
@@ -1,134 +1,82 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-(function () {
- // Copyright Joyent, Inc. and other Node contributors.
- //
- // Permission is hereby granted, free of charge, to any person obtaining a
- // copy of this software and associated documentation files (the
- // "Software"), to deal in the Software without restriction, including
- // without limitation the rights to use, copy, modify, merge, publish,
- // distribute, sublicense, and/or sell copies of the Software, and to permit
- // persons to whom the Software is furnished to do so, subject to the
- // following conditions:
- //
- // The above copyright notice and this permission notice shall be included
- // in all copies or substantial portions of the Software.
- //
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
- // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
- // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
-
- /**/
- var bufferShim = require('safe-buffer').Buffer;
- /**/
-
-
- require('../common');
-
- var assert = require('assert/');
-
- var stream = require('../../');
-
- var TestWriter =
- /*#__PURE__*/
- function (_stream$Writable) {
- _inherits(TestWriter, _stream$Writable);
-
- function TestWriter() {
- _classCallCheck(this, TestWriter);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).apply(this, arguments));
- }
-
- _createClass(TestWriter, [{
- key: "_write",
- value: function _write(buffer, encoding, callback) {
- console.log('write called'); // super slow write stream (callback never called)
- }
- }]);
-
- return TestWriter;
- }(stream.Writable);
-
- var dest = new TestWriter();
-
- var TestReader =
- /*#__PURE__*/
- function (_stream$Readable) {
- _inherits(TestReader, _stream$Readable);
-
- function TestReader() {
- var _this;
-
- _classCallCheck(this, TestReader);
-
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this));
- _this.reads = 0;
- return _this;
- }
-
- _createClass(TestReader, [{
- key: "_read",
- value: function _read(size) {
- this.reads += 1;
- this.push(bufferShim.alloc(size));
- }
- }]);
-
- return TestReader;
- }(stream.Readable);
-
- var src1 = new TestReader();
- var src2 = new TestReader();
- src1.pipe(dest);
- src1.once('readable', function () {
- process.nextTick(function () {
- src2.pipe(dest);
- src2.once('readable', function () {
- process.nextTick(function () {
- src1.unpipe(dest);
- });
- });
- });
- });
- process.on('exit', function () {
- assert.strictEqual(src1.reads, 2);
- assert.strictEqual(src2.reads, 2);
- });
-})();
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
+
+const tap = require('tap')
+
+const silentConsole = {
+ log() {},
+
+ error() {}
+}
+require('../common')
+
+const assert = require('assert')
+
+const stream = require('../../lib/ours/index')
+
+class TestWriter extends stream.Writable {
+ _write(buffer, encoding, callback) {
+ silentConsole.log('write called') // Super slow write stream (callback never called)
+ }
+}
+
+const dest = new TestWriter()
+
+class TestReader extends stream.Readable {
+ constructor() {
+ super()
+ this.reads = 0
+ }
+
+ _read(size) {
+ this.reads += 1
+ this.push(Buffer.alloc(size))
+ }
+}
+
+const src1 = new TestReader()
+const src2 = new TestReader()
+src1.pipe(dest)
+src1.once('readable', () => {
+ process.nextTick(() => {
+ src2.pipe(dest)
+ src2.once('readable', () => {
+ process.nextTick(() => {
+ src1.unpipe(dest)
+ })
+ })
+ })
+})
+process.on('exit', () => {
+ assert.strictEqual(src1.reads, 2)
+ assert.strictEqual(src2.reads, 2)
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js
index 93e26015d9..629c75592b 100644
--- a/test/parallel/test-stream2-unpipe-leak.js
+++ b/test/parallel/test-stream2-unpipe-leak.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,103 +18,72 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-require('../common');
+const tap = require('tap')
-var assert = require('assert/');
+const silentConsole = {
+ log() {},
-var stream = require('../../');
+ error() {}
+}
+require('../common')
-var chunk = bufferShim.from('hallo');
+const assert = require('assert')
-var TestWriter =
-/*#__PURE__*/
-function (_stream$Writable) {
- _inherits(TestWriter, _stream$Writable);
+const stream = require('../../lib/ours/index')
- function TestWriter() {
- _classCallCheck(this, TestWriter);
+const chunk = Buffer.from('hallo')
- return _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).apply(this, arguments));
+class TestWriter extends stream.Writable {
+ _write(buffer, encoding, callback) {
+ callback(null)
}
+}
- _createClass(TestWriter, [{
- key: "_write",
- value: function _write(buffer, encoding, callback) {
- callback(null);
- }
- }]);
-
- return TestWriter;
-}(stream.Writable);
-
-var dest = new TestWriter(); // Set this high so that we'd trigger a nextTick warning
+const dest = new TestWriter() // Set this high so that we'd trigger a nextTick warning
// and/or RangeError if we do maybeReadMore wrong.
-var TestReader =
-/*#__PURE__*/
-function (_stream$Readable) {
- _inherits(TestReader, _stream$Readable);
-
- function TestReader() {
- _classCallCheck(this, TestReader);
-
- return _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this, {
+class TestReader extends stream.Readable {
+ constructor() {
+ super({
highWaterMark: 0x10000
- }));
+ })
}
- _createClass(TestReader, [{
- key: "_read",
- value: function _read(size) {
- this.push(chunk);
- }
- }]);
-
- return TestReader;
-}(stream.Readable);
+ _read(size) {
+ this.push(chunk)
+ }
+}
-var src = new TestReader();
+const src = new TestReader()
-for (var i = 0; i < 10; i++) {
- src.pipe(dest);
- src.unpipe(dest);
+for (let i = 0; i < 10; i++) {
+ src.pipe(dest)
+ src.unpipe(dest)
}
-assert.strictEqual(src.listeners('end').length, 0);
-assert.strictEqual(src.listeners('readable').length, 0);
-assert.strictEqual(dest.listeners('unpipe').length, 0);
-assert.strictEqual(dest.listeners('drain').length, 0);
-assert.strictEqual(dest.listeners('error').length, 0);
-assert.strictEqual(dest.listeners('close').length, 0);
-assert.strictEqual(dest.listeners('finish').length, 0);
-console.error(src._readableState);
+assert.strictEqual(src.listeners('end').length, 0)
+assert.strictEqual(src.listeners('readable').length, 0)
+assert.strictEqual(dest.listeners('unpipe').length, 0)
+assert.strictEqual(dest.listeners('drain').length, 0)
+assert.strictEqual(dest.listeners('error').length, 0)
+assert.strictEqual(dest.listeners('close').length, 0)
+assert.strictEqual(dest.listeners('finish').length, 0)
+silentConsole.error(src._readableState)
process.on('exit', function () {
- src.readableBuffer.length = 0;
- console.error(src._readableState);
- assert(src.readableLength >= src.readableHighWaterMark);
-
- require('tap').pass();
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+ src.readableBuffer.length = 0
+ silentConsole.error(src._readableState)
+ assert(src.readableLength >= src.readableHighWaterMark)
+ silentConsole.log('ok')
+})
+/* replacement start */
+
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js
index 4d38254e8d..26808e2773 100644
--- a/test/parallel/test-stream2-writable.js
+++ b/test/parallel/test-stream2-writable.js
@@ -1,21 +1,3 @@
-"use strict";
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
-
-function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -36,430 +18,479 @@ function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || func
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
-
-
-var common = require('../common');
-
-var W = require('../../lib/_stream_writable');
+const tap = require('tap')
-var D = require('../../lib/_stream_duplex');
+const silentConsole = {
+ log() {},
-var assert = require('assert/');
-
-var TestWriter =
-/*#__PURE__*/
-function (_W) {
- _inherits(TestWriter, _W);
+ error() {}
+}
+const common = require('../common')
- function TestWriter(opts) {
- var _this;
+const { Writable: W, Duplex: D } = require('../../lib/ours/index')
- _classCallCheck(this, TestWriter);
+const assert = require('assert')
- _this = _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).call(this, opts));
- _this.buffer = [];
- _this.written = 0;
- return _this;
+class TestWriter extends W {
+ constructor(opts) {
+ super(opts)
+ this.buffer = []
+ this.written = 0
}
- _createClass(TestWriter, [{
- key: "_write",
- value: function _write(chunk, encoding, cb) {
- var _this2 = this;
-
- // simulate a small unpredictable latency
- setTimeout(function () {
- _this2.buffer.push(chunk.toString());
-
- _this2.written += chunk.length;
- cb();
- }, Math.floor(Math.random() * 10));
- }
- }]);
-
- return TestWriter;
-}(W);
+ _write(chunk, encoding, cb) {
+ // Simulate a small unpredictable latency
+ setTimeout(() => {
+ this.buffer.push(chunk.toString())
+ this.written += chunk.length
+ cb()
+ }, Math.floor(Math.random() * 10))
+ }
+}
-var chunks = new Array(50);
+const chunks = new Array(50)
-for (var i = 0; i < chunks.length; i++) {
- chunks[i] = 'x'.repeat(i);
+for (let i = 0; i < chunks.length; i++) {
+ chunks[i] = 'x'.repeat(i)
}
{
// Verify fast writing
- var tw = new TestWriter({
+ const tw = new TestWriter({
highWaterMark: 100
- });
- tw.on('finish', common.mustCall(function () {
- // got chunks in the right order
- assert.deepStrictEqual(tw.buffer, chunks);
- }));
- forEach(chunks, function (chunk) {
+ })
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ )
+ chunks.forEach(function (chunk) {
// Ignore backpressure. Just buffer it all up.
- tw.write(chunk);
- });
- tw.end();
+ tw.write(chunk)
+ })
+ tw.end()
}
{
// Verify slow writing
- var _tw = new TestWriter({
+ const tw = new TestWriter({
highWaterMark: 100
- });
-
- _tw.on('finish', common.mustCall(function () {
- // got chunks in the right order
- assert.deepStrictEqual(_tw.buffer, chunks);
- }));
-
- var _i = 0;
-
- (function W() {
- _tw.write(chunks[_i++]);
-
- if (_i < chunks.length) setTimeout(W, 10);else _tw.end();
- })();
+ })
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ )
+ let i = 0
+
+ ;(function W() {
+ tw.write(chunks[i++])
+ if (i < chunks.length) setTimeout(W, 10)
+ else tw.end()
+ })()
}
{
// Verify write backpressure
- var _tw2 = new TestWriter({
+ const tw = new TestWriter({
highWaterMark: 50
- });
-
- var drains = 0;
-
- _tw2.on('finish', common.mustCall(function () {
- // got chunks in the right order
- assert.deepStrictEqual(_tw2.buffer, chunks);
- assert.strictEqual(drains, 17);
- }));
-
- _tw2.on('drain', function () {
- drains++;
- });
-
- var _i2 = 0;
-
- (function W() {
- var ret;
+ })
+ let drains = 0
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ assert.strictEqual(drains, 17)
+ })
+ )
+ tw.on('drain', function () {
+ drains++
+ })
+ let i = 0
+
+ ;(function W() {
+ let ret
do {
- ret = _tw2.write(chunks[_i2++]);
- } while (ret !== false && _i2 < chunks.length);
-
- if (_i2 < chunks.length) {
- assert(_tw2.writableLength >= 50);
+ ret = tw.write(chunks[i++])
+ } while (ret !== false && i < chunks.length)
- _tw2.once('drain', W);
+ if (i < chunks.length) {
+ assert(tw.writableLength >= 50)
+ tw.once('drain', W)
} else {
- _tw2.end();
+ tw.end()
}
- })();
+ })()
}
{
// Verify write buffersize
- var _tw3 = new TestWriter({
+ const tw = new TestWriter({
highWaterMark: 100
- });
-
- var encodings = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', undefined];
-
- _tw3.on('finish', function () {
- // got the expected chunks
- assert.deepStrictEqual(_tw3.buffer, chunks);
- });
-
- forEach(chunks, function (chunk, i) {
- var enc = encodings[i % encodings.length];
- chunk = bufferShim.from(chunk);
-
- _tw3.write(chunk.toString(enc), enc);
- });
+ })
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'latin1',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ // Got the expected chunks
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ chunks.forEach(function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
}
{
// Verify write with no buffersize
- var _tw4 = new TestWriter({
+ const tw = new TestWriter({
highWaterMark: 100,
decodeStrings: false
- });
-
- _tw4._write = function (chunk, encoding, cb) {
- assert.strictEqual(typeof chunk, 'string');
- chunk = bufferShim.from(chunk, encoding);
- return TestWriter.prototype._write.call(this, chunk, encoding, cb);
- };
-
- var _encodings = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', undefined];
+ })
- _tw4.on('finish', function () {
- // got the expected chunks
- assert.deepStrictEqual(_tw4.buffer, chunks);
- });
-
- forEach(chunks, function (chunk, i) {
- var enc = _encodings[i % _encodings.length];
- chunk = bufferShim.from(chunk);
+ tw._write = function (chunk, encoding, cb) {
+ assert.strictEqual(typeof chunk, 'string')
+ chunk = Buffer.from(chunk, encoding)
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb)
+ }
- _tw4.write(chunk.toString(enc), enc);
- });
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'latin1',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ // Got the expected chunks
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ chunks.forEach(function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
}
{
// Verify write callbacks
- var callbacks = chunks.map(function (chunk, i) {
- return [i, function () {
- callbacks._called[i] = chunk;
- }];
- }).reduce(function (set, x) {
- set["callback-".concat(x[0])] = x[1];
- return set;
- }, {});
- callbacks._called = [];
-
- var _tw5 = new TestWriter({
+ const callbacks = chunks
+ .map(function (chunk, i) {
+ return [
+ i,
+ function () {
+ callbacks._called[i] = chunk
+ }
+ ]
+ })
+ .reduce(function (set, x) {
+ set[`callback-${x[0]}`] = x[1]
+ return set
+ }, {})
+ callbacks._called = []
+ const tw = new TestWriter({
highWaterMark: 100
- });
-
- _tw5.on('finish', common.mustCall(function () {
- process.nextTick(common.mustCall(function () {
- // got chunks in the right order
- assert.deepStrictEqual(_tw5.buffer, chunks); // called all callbacks
-
- assert.deepStrictEqual(callbacks._called, chunks);
- }));
- }));
-
- forEach(chunks, function (chunk, i) {
- _tw5.write(chunk, callbacks["callback-".concat(i)]);
- });
-
- _tw5.end();
+ })
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ process.nextTick(
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks) // Called all callbacks
+
+ assert.deepStrictEqual(callbacks._called, chunks)
+ })
+ )
+ })
+ )
+ chunks.forEach(function (chunk, i) {
+ tw.write(chunk, callbacks[`callback-${i}`])
+ })
+ tw.end()
}
{
// Verify end() callback
- var _tw6 = new TestWriter();
-
- _tw6.end(common.mustCall());
+ const tw = new TestWriter()
+ tw.end(common.mustCall())
}
+const helloWorldBuffer = Buffer.from('hello world')
{
// Verify end() callback with chunk
- var _tw7 = new TestWriter();
-
- _tw7.end(bufferShim.from('hello world'), common.mustCall());
+ const tw = new TestWriter()
+ tw.end(helloWorldBuffer, common.mustCall())
}
{
// Verify end() callback with chunk and encoding
- var _tw8 = new TestWriter();
-
- _tw8.end('hello world', 'ascii', common.mustCall());
+ const tw = new TestWriter()
+ tw.end('hello world', 'ascii', common.mustCall())
}
{
// Verify end() callback after write() call
- var _tw9 = new TestWriter();
-
- _tw9.write(bufferShim.from('hello world'));
-
- _tw9.end(common.mustCall());
+ const tw = new TestWriter()
+ tw.write(helloWorldBuffer)
+ tw.end(common.mustCall())
}
{
// Verify end() callback after write() callback
- var _tw10 = new TestWriter();
-
- var writeCalledback = false;
-
- _tw10.write(bufferShim.from('hello world'), function () {
- writeCalledback = true;
- });
-
- _tw10.end(common.mustCall(function () {
- assert.strictEqual(writeCalledback, true);
- }));
+ const tw = new TestWriter()
+ let writeCalledback = false
+ tw.write(helloWorldBuffer, function () {
+ writeCalledback = true
+ })
+ tw.end(
+ common.mustCall(function () {
+ assert.strictEqual(writeCalledback, true)
+ })
+ )
}
{
// Verify encoding is ignored for buffers
- var _tw11 = new W();
-
- var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb';
- _tw11._write = common.mustCall(function (chunk) {
- assert.strictEqual(chunk.toString('hex'), hex);
- });
- var buf = bufferShim.from(hex, 'hex');
-
- _tw11.write(buf, 'latin1');
+ const tw = new W()
+ const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'
+ tw._write = common.mustCall(function (chunk) {
+ assert.strictEqual(chunk.toString('hex'), hex)
+ })
+ const buf = Buffer.from(hex, 'hex')
+ tw.write(buf, 'latin1')
}
{
// Verify writables cannot be piped
- var w = new W();
- w._write = common.mustNotCall();
- var gotError = false;
+ const w = new W({
+ autoDestroy: false
+ })
+ w._write = common.mustNotCall()
+ let gotError = false
w.on('error', function () {
- gotError = true;
- });
- w.pipe(process.stdout);
- assert.strictEqual(gotError, true);
+ gotError = true
+ })
+ w.pipe(process.stdout)
+ assert.strictEqual(gotError, true)
}
{
// Verify that duplex streams cannot be piped
- var d = new D();
- d._read = common.mustCall();
- d._write = common.mustNotCall();
- var _gotError = false;
+ const d = new D()
+ d._read = common.mustCall()
+ d._write = common.mustNotCall()
+ let gotError = false
d.on('error', function () {
- _gotError = true;
- });
- d.pipe(process.stdout);
- assert.strictEqual(_gotError, false);
+ gotError = true
+ })
+ d.pipe(process.stdout)
+ assert.strictEqual(gotError, false)
}
{
// Verify that end(chunk) twice is an error
- var _w = new W();
-
- _w._write = common.mustCall(function (msg) {
- assert.strictEqual(msg.toString(), 'this is the end');
- });
- var _gotError2 = false;
-
- _w.on('error', function (er) {
- _gotError2 = true;
- assert.strictEqual(er.message, 'write after end');
- });
-
- _w.end('this is the end');
-
- _w.end('and so is this');
-
- process.nextTick(common.mustCall(function () {
- assert.strictEqual(_gotError2, true);
- }));
+ const w = new W()
+ w._write = common.mustCall((msg) => {
+ assert.strictEqual(msg.toString(), 'this is the end')
+ })
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ assert.strictEqual(er.message, 'write after end')
+ })
+ w.end('this is the end')
+ w.end('and so is this')
+ process.nextTick(
+ common.mustCall(function () {
+ assert.strictEqual(gotError, true)
+ })
+ )
}
{
// Verify stream doesn't end while writing
- var _w2 = new W();
-
- var wrote = false;
-
- _w2._write = function (chunk, e, cb) {
- assert.strictEqual(this.writing, undefined);
- wrote = true;
- this.writing = true;
- setTimeout(function () {
- this.writing = false;
- cb();
- }, 1);
- };
-
- _w2.on('finish', common.mustCall(function () {
- assert.strictEqual(wrote, true);
- }));
-
- _w2.write(bufferShim.alloc(0));
+ const w = new W()
+ let wrote = false
+
+ w._write = function (chunk, e, cb) {
+ assert.strictEqual(this.writing, undefined)
+ wrote = true
+ this.writing = true
+ setTimeout(() => {
+ this.writing = false
+ cb()
+ }, 1)
+ }
- _w2.end();
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(wrote, true)
+ assert.strictEqual(this.writing, false)
+ })
+ )
+ w.write(Buffer.alloc(0))
+ w.end()
}
{
// Verify finish does not come before write() callback
- var _w3 = new W();
-
- var writeCb = false;
+ const w = new W()
+ let writeCb = false
- _w3._write = function (chunk, e, cb) {
+ w._write = function (chunk, e, cb) {
setTimeout(function () {
- writeCb = true;
- cb();
- }, 10);
- };
-
- _w3.on('finish', common.mustCall(function () {
- assert.strictEqual(writeCb, true);
- }));
-
- _w3.write(bufferShim.alloc(0));
+ writeCb = true
+ cb()
+ }, 10)
+ }
- _w3.end();
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(writeCb, true)
+ })
+ )
+ w.write(Buffer.alloc(0))
+ w.end()
}
{
// Verify finish does not come before synchronous _write() callback
- var _w4 = new W();
-
- var _writeCb = false;
+ const w = new W()
+ let writeCb = false
- _w4._write = function (chunk, e, cb) {
- cb();
- };
-
- _w4.on('finish', common.mustCall(function () {
- assert.strictEqual(_writeCb, true);
- }));
-
- _w4.write(bufferShim.alloc(0), function () {
- _writeCb = true;
- });
+ w._write = function (chunk, e, cb) {
+ cb()
+ }
- _w4.end();
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(writeCb, true)
+ })
+ )
+ w.write(Buffer.alloc(0), function () {
+ writeCb = true
+ })
+ w.end()
}
{
// Verify finish is emitted if the last chunk is empty
- var _w5 = new W();
+ const w = new W()
- _w5._write = function (chunk, e, cb) {
- process.nextTick(cb);
- };
-
- _w5.on('finish', common.mustCall());
-
- _w5.write(bufferShim.allocUnsafe(1));
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
- _w5.end(bufferShim.alloc(0));
+ w.on('finish', common.mustCall())
+ w.write(Buffer.allocUnsafe(1))
+ w.end(Buffer.alloc(0))
}
{
// Verify that finish is emitted after shutdown
- var _w6 = new W();
-
- var shutdown = false;
- _w6._final = common.mustCall(function (cb) {
- assert.strictEqual(this, _w6);
+ const w = new W()
+ let shutdown = false
+ w._final = common.mustCall(function (cb) {
+ assert.strictEqual(this, w)
setTimeout(function () {
- shutdown = true;
- cb();
- }, 100);
- });
-
- _w6._write = function (chunk, e, cb) {
- process.nextTick(cb);
- };
-
- _w6.on('finish', common.mustCall(function () {
- assert.strictEqual(shutdown, true);
- }));
+ shutdown = true
+ cb()
+ }, 100)
+ })
- _w6.write(bufferShim.allocUnsafe(1));
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
- _w6.end(bufferShim.allocUnsafe(0));
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(shutdown, true)
+ })
+ )
+ w.write(Buffer.allocUnsafe(1))
+ w.end(Buffer.allocUnsafe(0))
}
-
-function forEach(xs, f) {
- for (var i = 0, l = xs.length; i < l; i++) {
- f(xs[i], i);
- }
+{
+ // Verify that error is only emitted once when failing in _finish.
+ const w = new W()
+ w._final = common.mustCall(function (cb) {
+ cb(new Error('test'))
+ })
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(w._writableState.errorEmitted, true)
+ assert.strictEqual(err.message, 'test')
+ w.on('error', common.mustNotCall())
+ w.destroy(new Error())
+ })
+ )
+ w.end()
}
+{
+ // Verify that error is only emitted once when failing in write.
+ const w = new W()
+ w.on('error', common.mustNotCall())
+ assert.throws(
+ () => {
+ w.write(null)
+ },
+ {
+ code: 'ERR_STREAM_NULL_VALUES'
+ }
+ )
+}
+{
+ // Verify that error is only emitted once when failing in write after end.
+ const w = new W()
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(w._writableState.errorEmitted, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ w.end()
+ w.write('hello')
+ w.destroy(new Error())
+}
+{
+ // Verify that finish is not emitted after error
+ const w = new W()
+ w._final = common.mustCall(function (cb) {
+ cb(new Error())
+ })
+
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+ w.on('error', common.mustCall())
+ w.on('prefinish', common.mustNotCall())
+ w.on('finish', common.mustNotCall())
+ w.write(Buffer.allocUnsafe(1))
+ w.end(Buffer.allocUnsafe(0))
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js
index 78a3fd8cd2..861cd5a40f 100644
--- a/test/parallel/test-stream3-cork-end.js
+++ b/test/parallel/test-stream3-cork-end.js
@@ -1,106 +1,99 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var Writable = stream.Writable; // Test the buffering behavior of Writable streams.
+const Writable = stream.Writable // Test the buffering behavior of Writable streams.
//
// The call to cork() triggers storing chunks which are flushed
// on calling end() and the stream subsequently ended.
//
// node version target: 0.12
-var expectedChunks = ['please', 'buffer', 'me', 'kindly'];
-var inputChunks = expectedChunks.slice(0);
-var seenChunks = [];
-var seenEnd = false;
-var w = new Writable(); // lets arrange to store the chunks
+const expectedChunks = ['please', 'buffer', 'me', 'kindly']
+const inputChunks = expectedChunks.slice(0)
+let seenChunks = []
+let seenEnd = false
+const w = new Writable() // Let's arrange to store the chunks.
w._write = function (chunk, encoding, cb) {
- // stream end event is not seen before the last write
- assert.ok(!seenEnd); // default encoding given none was specified
+ // Stream end event is not seen before the last write.
+ assert.ok(!seenEnd) // Default encoding given none was specified.
- assert.strictEqual(encoding, 'buffer');
- seenChunks.push(chunk);
- cb();
-}; // lets record the stream end event
+ assert.strictEqual(encoding, 'buffer')
+ seenChunks.push(chunk)
+ cb()
+} // Let's record the stream end event.
-
-w.on('finish', function () {
- seenEnd = true;
-});
+w.on('finish', () => {
+ seenEnd = true
+})
function writeChunks(remainingChunks, callback) {
- var writeChunk = remainingChunks.shift();
- var writeState;
+ const writeChunk = remainingChunks.shift()
+ let writeState
if (writeChunk) {
- setImmediate(function () {
- writeState = w.write(writeChunk); // we were not told to stop writing
+ setImmediate(() => {
+ writeState = w.write(writeChunk) // We were not told to stop writing.
- assert.ok(writeState);
- writeChunks(remainingChunks, callback);
- });
+ assert.ok(writeState)
+ writeChunks(remainingChunks, callback)
+ })
} else {
- callback();
+ callback()
}
-} // do an initial write
-
+} // Do an initial write.
-w.write('stuff'); // the write was immediate
+w.write('stuff') // The write was immediate.
-assert.strictEqual(seenChunks.length, 1); // reset the seen chunks
+assert.strictEqual(seenChunks.length, 1) // Reset the seen chunks.
-seenChunks = []; // trigger stream buffering
+seenChunks = [] // Trigger stream buffering.
-w.cork(); // write the bufferedChunks
+w.cork() // Write the bufferedChunks.
-writeChunks(inputChunks, function () {
- // should not have seen anything yet
- assert.strictEqual(seenChunks.length, 0); // trigger flush and ending the stream
+writeChunks(inputChunks, () => {
+ // Should not have seen anything yet.
+ assert.strictEqual(seenChunks.length, 0) // Trigger flush and ending the stream.
- w.end(); // stream should not ended in current tick
+ w.end() // Stream should not ended in current tick.
- assert.ok(!seenEnd); // buffered bytes should be seen in current tick
+ assert.ok(!seenEnd) // Buffered bytes should be seen in current tick.
- assert.strictEqual(seenChunks.length, 4); // did the chunks match
+ assert.strictEqual(seenChunks.length, 4) // Did the chunks match.
- for (var i = 0, l = expectedChunks.length; i < l; i++) {
- var seen = seenChunks[i]; // there was a chunk
+ for (let i = 0, l = expectedChunks.length; i < l; i++) {
+ const seen = seenChunks[i] // There was a chunk.
- assert.ok(seen);
- var expected = bufferShim.from(expectedChunks[i]); // it was what we expected
+ assert.ok(seen)
+ const expected = Buffer.from(expectedChunks[i]) // It was what we expected.
- assert.deepEqual(seen, expected);
+ assert.ok(seen.equals(expected))
}
- setImmediate(function () {
- // stream should have ended in next tick
- assert.ok(seenEnd);
- });
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
+ setImmediate(() => {
+ // Stream should have ended in next tick.
+ assert.ok(seenEnd)
+ })
+})
+/* replacement start */
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js
index 48875fff2e..a6940337cd 100644
--- a/test/parallel/test-stream3-cork-uncork.js
+++ b/test/parallel/test-stream3-cork-uncork.js
@@ -1,102 +1,95 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var Writable = stream.Writable; // Test the buffering behavior of Writable streams.
+const Writable = stream.Writable // Test the buffering behavior of Writable streams.
//
// The call to cork() triggers storing chunks which are flushed
// on calling uncork() in the same tick.
//
// node version target: 0.12
-var expectedChunks = ['please', 'buffer', 'me', 'kindly'];
-var inputChunks = expectedChunks.slice(0);
-var seenChunks = [];
-var seenEnd = false;
-var w = new Writable(); // lets arrange to store the chunks
+const expectedChunks = ['please', 'buffer', 'me', 'kindly']
+const inputChunks = expectedChunks.slice(0)
+let seenChunks = []
+let seenEnd = false
+const w = new Writable() // Let's arrange to store the chunks.
w._write = function (chunk, encoding, cb) {
- // default encoding given none was specified
- assert.strictEqual(encoding, 'buffer');
- seenChunks.push(chunk);
- cb();
-}; // lets record the stream end event
+ // Default encoding given none was specified.
+ assert.strictEqual(encoding, 'buffer')
+ seenChunks.push(chunk)
+ cb()
+} // Let's record the stream end event.
-
-w.on('finish', function () {
- seenEnd = true;
-});
+w.on('finish', () => {
+ seenEnd = true
+})
function writeChunks(remainingChunks, callback) {
- var writeChunk = remainingChunks.shift();
- var writeState;
+ const writeChunk = remainingChunks.shift()
+ let writeState
if (writeChunk) {
- setImmediate(function () {
- writeState = w.write(writeChunk); // we were not told to stop writing
+ setImmediate(() => {
+ writeState = w.write(writeChunk) // We were not told to stop writing.
- assert.ok(writeState);
- writeChunks(remainingChunks, callback);
- });
+ assert.ok(writeState)
+ writeChunks(remainingChunks, callback)
+ })
} else {
- callback();
+ callback()
}
-} // do an initial write
-
+} // Do an initial write.
-w.write('stuff'); // the write was immediate
+w.write('stuff') // The write was immediate.
-assert.strictEqual(seenChunks.length, 1); // reset the chunks seen so far
+assert.strictEqual(seenChunks.length, 1) // Reset the chunks seen so far.
-seenChunks = []; // trigger stream buffering
+seenChunks = [] // Trigger stream buffering.
-w.cork(); // write the bufferedChunks
+w.cork() // Write the bufferedChunks.
-writeChunks(inputChunks, function () {
- // should not have seen anything yet
- assert.strictEqual(seenChunks.length, 0); // trigger writing out the buffer
+writeChunks(inputChunks, () => {
+ // Should not have seen anything yet.
+ assert.strictEqual(seenChunks.length, 0) // Trigger writing out the buffer.
- w.uncork(); // buffered bytes should be seen in current tick
+ w.uncork() // Buffered bytes should be seen in current tick.
- assert.strictEqual(seenChunks.length, 4); // did the chunks match
+ assert.strictEqual(seenChunks.length, 4) // Did the chunks match.
- for (var i = 0, l = expectedChunks.length; i < l; i++) {
- var seen = seenChunks[i]; // there was a chunk
+ for (let i = 0, l = expectedChunks.length; i < l; i++) {
+ const seen = seenChunks[i] // There was a chunk.
- assert.ok(seen);
- var expected = bufferShim.from(expectedChunks[i]); // it was what we expected
+ assert.ok(seen)
+ const expected = Buffer.from(expectedChunks[i]) // It was what we expected.
- assert.deepEqual(seen, expected);
+ assert.ok(seen.equals(expected))
}
- setImmediate(function () {
- // the stream should not have been ended
- assert.ok(!seenEnd);
- });
-});
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
+ setImmediate(() => {
+ // The stream should not have been ended.
+ assert.ok(!seenEnd)
+ })
+})
+/* replacement start */
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
-
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js
index 66a48c91df..c62fd3e2d6 100644
--- a/test/parallel/test-stream3-pause-then-read.js
+++ b/test/parallel/test-stream3-pause-then-read.js
@@ -1,5 +1,3 @@
-"use strict";
-
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,182 +18,175 @@
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-require('../common');
+ error() {}
+}
+require('../common')
-var assert = require('assert/');
+const assert = require('assert')
-var stream = require('../../');
+const stream = require('../../lib/ours/index')
-var Readable = stream.Readable;
-var Writable = stream.Writable;
-var totalChunks = 100;
-var chunkSize = 99;
-var expectTotalData = totalChunks * chunkSize;
-var expectEndingData = expectTotalData;
-var r = new Readable({
+const Readable = stream.Readable
+const Writable = stream.Writable
+const totalChunks = 100
+const chunkSize = 99
+const expectTotalData = totalChunks * chunkSize
+let expectEndingData = expectTotalData
+const r = new Readable({
highWaterMark: 1000
-});
-var chunks = totalChunks;
+})
+let chunks = totalChunks
r._read = function (n) {
- console.log('_read called', chunks);
- if (!(chunks % 2)) setImmediate(push);else if (!(chunks % 3)) process.nextTick(push);else push();
-};
+ silentConsole.log('_read called', chunks)
+ if (!(chunks % 2)) setImmediate(push)
+ else if (!(chunks % 3)) process.nextTick(push)
+ else push()
+}
-var totalPushed = 0;
+let totalPushed = 0
function push() {
- var chunk = chunks-- > 0 ? bufferShim.alloc(chunkSize, 'x') : null;
+ const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize, 'x') : null
if (chunk) {
- totalPushed += chunk.length;
+ totalPushed += chunk.length
}
- console.log('chunks', chunks);
- r.push(chunk);
+ silentConsole.log('chunks', chunks)
+ r.push(chunk)
}
-read100(); // first we read 100 bytes
+read100() // First we read 100 bytes.
function read100() {
- readn(100, onData);
+ readn(100, onData)
}
function readn(n, then) {
- console.error("read ".concat(n));
- expectEndingData -= n;
-
- (function read() {
- var c = r.read(n);
- console.error('c', c);
- if (!c) r.once('readable', read);else {
- assert.strictEqual(c.length, n);
- assert(!r.readableFlowing);
- then();
+ silentConsole.error(`read ${n}`)
+ expectEndingData -= n
+
+ ;(function read() {
+ const c = r.read(n)
+ silentConsole.error('c', c)
+ if (!c) r.once('readable', read)
+ else {
+ assert.strictEqual(c.length, n)
+ assert(!r.readableFlowing)
+ then()
}
- })();
-} // then we listen to some data events
-
+ })()
+} // Then we listen to some data events.
function onData() {
- expectEndingData -= 100;
- console.error('onData');
- var seen = 0;
+ expectEndingData -= 100
+ silentConsole.error('onData')
+ let seen = 0
r.on('data', function od(c) {
- seen += c.length;
+ seen += c.length
if (seen >= 100) {
- // seen enough
- r.removeListener('data', od);
- r.pause();
+ // Seen enough
+ r.removeListener('data', od)
+ r.pause()
if (seen > 100) {
- // oh no, seen too much!
- // put the extra back.
- var diff = seen - 100;
- r.unshift(c.slice(c.length - diff));
- console.error('seen too much', seen, diff);
- } // Nothing should be lost in between
-
-
- setImmediate(pipeLittle);
+ // Oh no, seen too much!
+ // Put the extra back.
+ const diff = seen - 100
+ r.unshift(c.slice(c.length - diff))
+ silentConsole.error('seen too much', seen, diff)
+ } // Nothing should be lost in-between.
+
+ setImmediate(pipeLittle)
}
- });
-} // Just pipe 200 bytes, then unshift the extra and unpipe
-
+ })
+} // Just pipe 200 bytes, then unshift the extra and unpipe.
function pipeLittle() {
- expectEndingData -= 200;
- console.error('pipe a little');
- var w = new Writable();
- var written = 0;
- w.on('finish', function () {
- assert.strictEqual(written, 200);
- setImmediate(read1234);
- });
+ expectEndingData -= 200
+ silentConsole.error('pipe a little')
+ const w = new Writable()
+ let written = 0
+ w.on('finish', () => {
+ assert.strictEqual(written, 200)
+ setImmediate(read1234)
+ })
w._write = function (chunk, encoding, cb) {
- written += chunk.length;
+ written += chunk.length
if (written >= 200) {
- r.unpipe(w);
- w.end();
- cb();
+ r.unpipe(w)
+ w.end()
+ cb()
if (written > 200) {
- var diff = written - 200;
- written -= diff;
- r.unshift(chunk.slice(chunk.length - diff));
+ const diff = written - 200
+ written -= diff
+ r.unshift(chunk.slice(chunk.length - diff))
}
} else {
- setImmediate(cb);
+ setImmediate(cb)
}
- };
-
- r.pipe(w);
-} // now read 1234 more bytes
+ }
+ r.pipe(w)
+} // Now read 1234 more bytes.
function read1234() {
- readn(1234, resumePause);
+ readn(1234, resumePause)
}
function resumePause() {
- console.error('resumePause'); // don't read anything, just resume and re-pause a whole bunch
-
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- r.resume();
- r.pause();
- setImmediate(pipe);
+ silentConsole.error('resumePause') // Don't read anything, just resume and re-pause a whole bunch.
+
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ setImmediate(pipe)
}
function pipe() {
- console.error('pipe the rest');
- var w = new Writable();
- var written = 0;
+ silentConsole.error('pipe the rest')
+ const w = new Writable()
+ let written = 0
w._write = function (chunk, encoding, cb) {
- written += chunk.length;
- cb();
- };
-
- w.on('finish', function () {
- console.error('written', written, totalPushed);
- assert.strictEqual(written, expectEndingData);
- assert.strictEqual(totalPushed, expectTotalData);
-
- require('tap').pass();
- });
- r.pipe(w);
-}
-
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
+ written += chunk.length
+ cb()
+ }
-_list.pop();
+ w.on('finish', () => {
+ silentConsole.error('written', written, totalPushed)
+ assert.strictEqual(written, expectEndingData)
+ assert.strictEqual(totalPushed, expectTotalData)
+ silentConsole.log('ok')
+ })
+ r.pipe(w)
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js
index b6231c5ba7..ab8b499998 100644
--- a/test/parallel/test-streams-highwatermark.js
+++ b/test/parallel/test-streams-highwatermark.js
@@ -1,15 +1,19 @@
-"use strict";
+'use strict'
-/**/
-var bufferShim = require('safe-buffer').Buffer;
-/**/
+const tap = require('tap')
+const silentConsole = {
+ log() {},
-var common = require('../common');
+ error() {}
+}
+const common = require('../common')
+
+const assert = require('assert')
-var assert = require('assert/');
+const stream = require('../../lib/ours/index')
-var stream = require('../../');
+const { inspect } = require('util')
{
// This test ensures that the stream implementation correctly handles values
@@ -17,81 +21,88 @@ var stream = require('../../');
// rejects invalid values.
// This number exceeds the range of 32 bit integer arithmetic but should still
// be handled correctly.
- var ovfl = Number.MAX_SAFE_INTEGER;
- var readable = stream.Readable({
+ const ovfl = Number.MAX_SAFE_INTEGER
+ const readable = stream.Readable({
highWaterMark: ovfl
- });
- assert.strictEqual(readable._readableState.highWaterMark, ovfl);
- var writable = stream.Writable({
+ })
+ assert.strictEqual(readable._readableState.highWaterMark, ovfl)
+ const writable = stream.Writable({
highWaterMark: ovfl
- });
- assert.strictEqual(writable._writableState.highWaterMark, ovfl);
-
- var _loop = function _loop() {
- var invalidHwm = _arr[_i];
-
- var _loop2 = function _loop2() {
- var type = _arr2[_i2];
- common.expectsError(function () {
- type({
- highWaterMark: invalidHwm
- });
- }, {
- type: TypeError,
- code: 'ERR_INVALID_OPT_VALUE',
- message: "The value \"".concat(invalidHwm, "\" is invalid for option \"highWaterMark\"")
- });
- };
-
- for (var _i2 = 0, _arr2 = [stream.Readable, stream.Writable]; _i2 < _arr2.length; _i2++) {
- _loop2();
+ })
+ assert.strictEqual(writable._writableState.highWaterMark, ovfl)
+
+ for (const invalidHwm of [true, false, '5', {}, -5, NaN]) {
+ for (const type of [stream.Readable, stream.Writable]) {
+ assert.throws(
+ () => {
+ type({
+ highWaterMark: invalidHwm
+ })
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_VALUE',
+ message: "The property 'options.highWaterMark' is invalid. " + `Received ${inspect(invalidHwm)}`
+ }
+ )
}
- };
-
- for (var _i = 0, _arr = [true, false, '5', {}, -5, NaN]; _i < _arr.length; _i++) {
- _loop();
}
}
{
// This test ensures that the push method's implementation
// correctly handles the edge case where the highWaterMark and
// the state.length are both zero
- var _readable = stream.Readable({
+ const readable = stream.Readable({
highWaterMark: 0
- });
-
- for (var i = 0; i < 3; i++) {
- var needMoreData = _readable.push();
+ })
- assert.strictEqual(needMoreData, true);
+ for (let i = 0; i < 3; i++) {
+ const needMoreData = readable.push()
+ assert.strictEqual(needMoreData, true)
}
}
{
// This test ensures that the read(n) method's implementation
// correctly handles the edge case where the highWaterMark, state.length
// and n are all zero
- var _readable2 = stream.Readable({
+ const readable = stream.Readable({
highWaterMark: 0
- });
-
- _readable2._read = common.mustCall();
-
- _readable2.read(0);
+ })
+ readable._read = common.mustCall()
+ readable.read(0)
}
-;
-
-(function () {
- var t = require('tap');
-
- t.pass('sync run');
-})();
-
-var _list = process.listeners('uncaughtException');
-
-process.removeAllListeners('uncaughtException');
-
-_list.pop();
+{
+ // Parse size as decimal integer
+ ;['1', '1.0', 1].forEach((size) => {
+ const readable = new stream.Readable({
+ read: common.mustCall(),
+ highWaterMark: 0
+ })
+ readable.read(size)
+ assert.strictEqual(readable._readableState.highWaterMark, Number(size))
+ })
+}
+{
+ // Test highwatermark limit
+ const hwm = 0x40000000 + 1
+ const readable = stream.Readable({
+ read() {}
+ })
+ assert.throws(
+ () => readable.read(hwm),
+ common.expectsError({
+ code: 'ERR_OUT_OF_RANGE',
+ message: 'The value of "size" is out of range.' + ' It must be <= 1GiB. Received ' + hwm
+ })
+ )
+}
+/* replacement start */
-_list.forEach(function (e) {
- return process.on('uncaughtException', e);
-});
\ No newline at end of file
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */