[Pkg-javascript-commits] [node-readable-stream] 01/06: Imported Upstream version 2.0.3
Ross Gammon
ross-guest@moszumanska.debian.org
Wed Nov 4 22:14:37 UTC 2015
This is an automated email from the git hooks/post-receive script.
ross-guest pushed a commit to branch master
in repository node-readable-stream.
commit b25a739dd06d909d6057ef6b66d54f17164e20d4
Author: Ross Gammon <rossgammon@mail.dk>
Date: Fri Oct 23 18:46:20 2015 +0200
Imported Upstream version 2.0.3
---
.gitignore | 2 +
.npmignore | 5 +
.travis.yml | 40 +
LICENSE | 18 +
README.md | 36 +
build/.gitignore | 1 +
build/build.js | 105 ++
build/common-replacements.js | 41 +
build/doc-replacements.js | 6 +
build/files.js | 223 +++
build/package.json | 12 +
build/test-replacements.js | 198 +++
doc/stream.markdown | 1696 ++++++++++++++++++++
doc/wg-meetings/2015-01-30.md | 60 +
duplex.js | 1 +
examples/CAPSLOCKTYPER.JS | 32 +
examples/typer.js | 17 +
lib/_stream_duplex.js | 82 +
lib/_stream_passthrough.js | 27 +
lib/_stream_readable.js | 961 +++++++++++
lib/_stream_transform.js | 197 +++
lib/_stream_writable.js | 527 ++++++
package.json | 37 +
passthrough.js | 1 +
readable.js | 12 +
test/browser.js | 62 +
test/browser/test-stream-big-packet.js | 62 +
test/browser/test-stream-big-push.js | 68 +
test/browser/test-stream-duplex.js | 35 +
test/browser/test-stream-end-paused.js | 32 +
test/browser/test-stream-ispaused.js | 27 +
test/browser/test-stream-pipe-after-end.js | 64 +
test/browser/test-stream-pipe-cleanup.js | 108 ++
test/browser/test-stream-pipe-error-handling.js | 102 ++
test/browser/test-stream-pipe-event.js | 32 +
.../test-stream-pipe-without-listenerCount.js | 27 +
test/browser/test-stream-push-order.js | 34 +
test/browser/test-stream-push-strings.js | 49 +
...test-stream-readable-constructor-set-methods.js | 22 +
test/browser/test-stream-readable-event.js | 114 ++
...est-stream-transform-constructor-set-methods.js | 35 +
...est-stream-transform-objectmode-falsey-value.js | 36 +
.../test-stream-transform-split-objectmode.js | 58 +
test/browser/test-stream-unshift-empty-chunk.js | 63 +
test/browser/test-stream-unshift-read-race.js | 110 ++
...test-stream-writable-change-default-encoding.js | 64 +
...test-stream-writable-constructor-set-methods.js | 40 +
.../test-stream-writable-decoded-encoding.js | 45 +
test/browser/test-stream-writev.js | 105 ++
.../test-stream2-base64-single-char-read-end.js | 41 +
test/browser/test-stream2-compatibility.js | 33 +
test/browser/test-stream2-large-read-stall.js | 62 +
test/browser/test-stream2-objects.js | 306 ++++
test/browser/test-stream2-pipe-error-handling.js | 88 +
.../test-stream2-pipe-error-once-listener.js | 41 +
test/browser/test-stream2-push.js | 120 ++
.../test-stream2-readable-empty-buffer-no-eof.js | 91 ++
test/browser/test-stream2-readable-from-list.js | 66 +
test/browser/test-stream2-readable-legacy-drain.js | 52 +
.../browser/test-stream2-readable-non-empty-end.js | 57 +
test/browser/test-stream2-readable-wrap-empty.js | 24 +
test/browser/test-stream2-readable-wrap.js | 86 +
test/browser/test-stream2-set-encoding.js | 317 ++++
test/browser/test-stream2-transform.js | 473 ++++++
test/browser/test-stream2-unpipe-drain.js | 65 +
test/browser/test-stream2-writable.js | 375 +++++
test/browser/test-stream3-pause-then-read.js | 150 ++
test/common.js | 511 ++++++
test/fixtures/x1024.txt | 1 +
test/parallel/test-stream-big-packet.js | 60 +
test/parallel/test-stream-big-push.js | 64 +
test/parallel/test-stream-duplex.js | 32 +
test/parallel/test-stream-end-paused.js | 33 +
test/parallel/test-stream-ispaused.js | 24 +
test/parallel/test-stream-pipe-after-end.js | 66 +
test/parallel/test-stream-pipe-cleanup.js | 105 ++
test/parallel/test-stream-pipe-error-handling.js | 111 ++
test/parallel/test-stream-pipe-event.js | 29 +
.../test-stream-pipe-without-listenerCount.js | 20 +
test/parallel/test-stream-push-order.js | 32 +
test/parallel/test-stream-push-strings.js | 46 +
...test-stream-readable-constructor-set-methods.js | 19 +
test/parallel/test-stream-readable-event.js | 106 ++
.../test-stream-readable-flow-recursion.js | 56 +
...est-stream-transform-constructor-set-methods.js | 32 +
...est-stream-transform-objectmode-falsey-value.js | 33 +
.../test-stream-transform-split-objectmode.js | 52 +
test/parallel/test-stream-unshift-empty-chunk.js | 61 +
test/parallel/test-stream-unshift-read-race.js | 113 ++
...test-stream-writable-change-default-encoding.js | 52 +
...test-stream-writable-constructor-set-methods.js | 35 +
.../test-stream-writable-decoded-encoding.js | 40 +
test/parallel/test-stream-writev.js | 106 ++
.../test-stream2-base64-single-char-read-end.js | 37 +
test/parallel/test-stream2-compatibility.js | 32 +
test/parallel/test-stream2-finish-pipe.js | 21 +
test/parallel/test-stream2-large-read-stall.js | 62 +
test/parallel/test-stream2-objects.js | 336 ++++
test/parallel/test-stream2-pipe-error-handling.js | 85 +
.../test-stream2-pipe-error-once-listener.js | 43 +
test/parallel/test-stream2-push.js | 118 ++
test/parallel/test-stream2-read-sync-stack.js | 34 +
.../test-stream2-readable-empty-buffer-no-eof.js | 98 ++
test/parallel/test-stream2-readable-from-list.js | 99 ++
.../parallel/test-stream2-readable-legacy-drain.js | 55 +
.../test-stream2-readable-non-empty-end.js | 58 +
test/parallel/test-stream2-readable-wrap-empty.js | 23 +
test/parallel/test-stream2-readable-wrap.js | 90 ++
test/parallel/test-stream2-set-encoding.js | 346 ++++
test/parallel/test-stream2-transform.js | 508 ++++++
test/parallel/test-stream2-unpipe-drain.js | 60 +
test/parallel/test-stream2-unpipe-leak.js | 54 +
test/parallel/test-stream2-writable.js | 391 +++++
test/parallel/test-stream3-pause-then-read.js | 147 ++
transform.js | 1 +
writable.js | 1 +
116 files changed, 12716 insertions(+)
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..36690ab
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+node_modules/
+.zuul.yml
diff --git a/.npmignore b/.npmignore
new file mode 100644
index 0000000..38344f8
--- /dev/null
+++ b/.npmignore
@@ -0,0 +1,5 @@
+build/
+test/
+examples/
+fs.js
+zlib.js
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..f796827
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,40 @@
+sudo: false
+language: node_js
+before_install:
+ - npm install -g npm@2
+ - npm install -g npm
+notifications:
+ email: false
+matrix:
+ include:
+ - node_js: '0.8'
+ env: TASK=test
+ - node_js: '0.10'
+ env: TASK=test
+ - node_js: '0.11'
+ env: TASK=test
+ - node_js: '0.12'
+ env: TASK=test
+ - node_js: 'iojs'
+ env: TASK=test
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="41..beta"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="36..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="['6.1', '7.1', '8.2']"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="['6.1', '7.1', '8.2']"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
+script: "npm run $TASK"
+env:
+ global:
+ - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
+ - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..e3d4e69
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,18 @@
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..f9fb520
--- /dev/null
+++ b/README.md
@@ -0,0 +1,36 @@
+# readable-stream
+
+***Node-core streams for userland*** [Build Status](https://travis-ci.org/nodejs/readable-stream)
+
+
+[NPM](https://nodei.co/npm/readable-stream/)
+
+
+[Sauce Test Status](https://saucelabs.com/u/readable-stream)
+
+```bash
+npm install --save readable-stream
+```
+
+***Node-core streams for userland***
+
+This package is a mirror of the Streams2 and Streams3 implementations in
+Node-core, including [documentation](doc/stream.markdown).
+
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core. For background, see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+
+As of version 2.0.0 **readable-stream** uses semantic versioning.
+
+# Streams WG Team Members
+
+* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com>
+ - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
+ - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org>
+ - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
+* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com>
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me>
diff --git a/build/.gitignore b/build/.gitignore
new file mode 100644
index 0000000..3c3629e
--- /dev/null
+++ b/build/.gitignore
@@ -0,0 +1 @@
+node_modules
diff --git a/build/build.js b/build/build.js
new file mode 100755
index 0000000..000ccc7
--- /dev/null
+++ b/build/build.js
@@ -0,0 +1,105 @@
+#!/usr/bin/env node
+
+const hyperquest = require('hyperzip')(require('hyperdirect'))
+ , bl = require('bl')
+ , fs = require('fs')
+ , path = require('path')
+ , cheerio = require('cheerio')
+
+ , files = require('./files')
+ , testReplace = require('./test-replacements')
+ , docReplace = require('./doc-replacements')
+
+ , srcurlpfx = `https://raw.githubusercontent.com/nodejs/node/v${process.argv[2]}/`
+ , libsrcurl = srcurlpfx + 'lib/'
+ , testsrcurl = srcurlpfx + 'test/parallel/'
+ , testlisturl = `https://github.com/nodejs/node/tree/v${process.argv[2]}/test/parallel`
+ , libourroot = path.join(__dirname, '../lib/')
+ , testourroot = path.join(__dirname, '../test/parallel/')
+ , docurlpfx = `https://raw.githubusercontent.com/nodejs/node/v${process.argv[2]}/doc/api/`
+ , docourroot = path.join(__dirname, '../doc')
+
+
+if (!/\d\.\d\.\d+/.test(process.argv[2])) {
+ console.error('Usage: build.js xx.yy.zz')
+ return process.exit(1);
+}
+
+function processFile (url, out, replacements) {
+ hyperquest(url).pipe(bl(function (err, data) {
+ if (err)
+ throw err
+
+ data = data.toString()
+ replacements.forEach(function (replacement) {
+ data = data.replace.apply(data, replacement)
+ })
+
+ fs.writeFile(out, data, 'utf8', function (err) {
+ if (err)
+ throw err
+
+ console.log('Wrote', out)
+ })
+ }))
+}
+
+function processLibFile (file) {
+ var replacements = files[file]
+ , url = libsrcurl + file
+ , out = path.join(libourroot, file)
+
+ processFile(url, out, replacements)
+}
+
+
+function processTestFile (file) {
+ var replacements = testReplace.all
+ , url = testsrcurl + file
+ , out = path.join(testourroot, file)
+
+ if (testReplace[file])
+ replacements = replacements.concat(testReplace[file])
+
+ processFile(url, out, replacements)
+}
+
+
+if (!/\d\.\d\.\d+/.test(process.argv[2])) {
+ console.log('Usage: build.js <node version>')
+ return process.exit(-1)
+}
+
+
+//--------------------------------------------------------------------
+// Grab & process files in ../lib/
+
+Object.keys(files).forEach(processLibFile)
+
+//--------------------------------------------------------------------
+// Discover, grab and process all test-stream* files on joyent/node
+
+hyperquest(testlisturl).pipe(bl(function (err, data) {
+ if (err)
+ throw err
+
+ var $ = cheerio.load(data.toString())
+
+ $('table.files .js-directory-link').each(function () {
+ var file = $(this).text()
+ if (/^test-stream/.test(file) && !/-wrap\.js$/.test(file))
+ processTestFile(file)
+ })
+}))
+
+processFile(docurlpfx + 'stream.markdown', path.join(docourroot, 'stream.markdown'), docReplace)
+
+
+//--------------------------------------------------------------------
+// Grab the joyent/node test/common.js
+
+processFile(
+ testsrcurl.replace(/parallel\/$/, 'common.js')
+ , path.join(testourroot, '../common.js')
+ , testReplace['common.js']
+)
diff --git a/build/common-replacements.js b/build/common-replacements.js
new file mode 100644
index 0000000..fe5badf
--- /dev/null
+++ b/build/common-replacements.js
@@ -0,0 +1,41 @@
+module.exports.altForEachImplReplacement = [
+ /$/
+ , '\nfunction forEach (xs, f) {\n'
+ + ' for (var i = 0, l = xs.length; i < l; i++) {\n'
+ + ' f(xs[i], i);\n'
+ + ' }\n'
+ + '}\n'
+]
+
+module.exports.altForEachUseReplacement = [
+ /(\W)([\w\.\(\),\[\]]+)(\.forEach\()/gm
+ , '$1forEach($2, '
+]
+
+module.exports.altIndexOfImplReplacement = [
+ /$/
+ , '\nfunction indexOf (xs, x) {\n'
+ + ' for (var i = 0, l = xs.length; i < l; i++) {\n'
+ + ' if (xs[i] === x) return i;\n'
+ + ' }\n'
+ + ' return -1;\n'
+ + '}\n'
+]
+
+module.exports.altIndexOfUseReplacement = [
+ /(\W)([\w\.\(\),\[\]]+)(\.indexOf\()/gm
+ , '$1indexOf($2, '
+]
+module.exports.objectKeysDefine = [
+ /^('use strict';)$/m
+ , '$1\n\n/*<replacement>*/\nvar objectKeys = Object.keys || function (obj) {\n'
+ + ' var keys = [];\n'
+ + ' for (var key in obj) keys.push(key);\n'
+ + ' return keys;\n'
+ + '}\n/*</replacement>*/\n'
+]
+
+module.exports.objectKeysReplacement = [
+ /Object\.keys/g
+ , 'objectKeys'
+ ]
diff --git a/build/doc-replacements.js b/build/doc-replacements.js
new file mode 100644
index 0000000..2887dff
--- /dev/null
+++ b/build/doc-replacements.js
@@ -0,0 +1,6 @@
+module.exports = [
+ [
+ /\]([\:|\(]\W?)([^\#]\w+\.html(?:#\w+))/g,
+ `]$1https://iojs.org/dist/v${process.argv[2]}/doc/api/$2`
+ ]
+]
diff --git a/build/files.js b/build/files.js
new file mode 100644
index 0000000..2578126
--- /dev/null
+++ b/build/files.js
@@ -0,0 +1,223 @@
+/* This file lists the files to be fetched from the node repo
+ * in the /lib/ directory which will be placed in the ../lib/
+ * directory after having each of the "replacements" in the
+ * array for that file applied to it. The replacements are
+ * simply the arguments to String#replace, so they can be
+ * strings, regexes, or functions.
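+ * For example, the constReplacement entry below, [ /const/g, 'var' ],
+ * is applied by build.js as data.replace(/const/g, 'var').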
+ */
+
+const headRegexp = /(^module.exports = \w+;?)/m
+
+ , requireReplacement = [
+ /(require\(['"])(_stream_)/g
+ , '$1./$2'
+ ]
+
+ , instanceofReplacement = [
+ /instanceof Stream\.(\w+)/g
+ , function (match, streamType) {
+ return 'instanceof ' + streamType
+ }
+ ]
+
+ // use the string_decoder in node_modules rather than core
+ , stringDecoderReplacement = [
+ /(require\(['"])(string_decoder)(['"]\))/g
+ , '$1$2/$3'
+ ]
+
+ , bufferReplacement = [
+ headRegexp
+ , '$1\n\n/*<replacement>*/\nvar Buffer = require(\'buffer\').Buffer;\n/*</replacement>*/\n'
+ ]
+
+ , addDuplexRequire = [
+ /^(function (?:Writable|Readable)(?:State)?.*{)/gm
+ , '$1\n var Duplex = require(\'./_stream_duplex\');\n'
+ ]
+
+ , altForEachImplReplacement = require('./common-replacements').altForEachImplReplacement
+ , altForEachUseReplacement = require('./common-replacements').altForEachUseReplacement
+ , altIndexOfImplReplacement = require('./common-replacements').altIndexOfImplReplacement
+ , altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement
+
+ , utilReplacement = [
+ /^var util = require\('util'\);/m
+ , '\n/*<replacement>*/\nvar util = require(\'core-util-is\');\n'
+ + 'util.inherits = require(\'inherits\');\n/*</replacement>*/\n'
+ ]
+
+ , debugLogReplacement = [
+ /var debug = util.debuglog\('stream'\);/
+ , '\n\n/*<replacement>*/\nvar debugUtil = require(\'util\');\n'
+ + 'var debug;\n'
+ + 'if (debugUtil && debugUtil.debuglog) {\n'
+ + ' debug = debugUtil.debuglog(\'stream\');\n'
+ + '} else {\n'
+ + ' debug = function () {};\n'
+ + '}\n/*</replacement>*/\n'
+ ]
+
+ , deprecateReplacement = [
+ /util.deprecate/
+ , 'require(\'util-deprecate\')'
+ ]
+
+ , objectDefinePropertyReplacement = [
+ /(Object\.defineProperties)/
+ , 'if (Object.defineProperties) $1'
+ ]
+ , objectDefinePropertySingReplacement = [
+ /Object\.defineProperty\(([\w\W]+?)\}\);/
+ , '(function (){try {\n'
+ + 'Object.defineProperty\($1});\n'
+ + '}catch(_){}}());\n'
+ ]
+
+ , isArrayDefine = [
+ headRegexp
+ , '$1\n\n/*<replacement>*/\nvar isArray = require(\'isarray\');\n/*</replacement>*/\n'
+ ]
+
+ , isArrayReplacement = [
+ /Array\.isArray/g
+ , 'isArray'
+ ]
+
+ , objectKeysDefine = require('./common-replacements').objectKeysDefine
+
+ , objectKeysReplacement = require('./common-replacements').objectKeysReplacement
+
+ , eventEmittterReplacement = [
+ /(require\('events'\)(?:\.EventEmitter)?;)/
+ , '$1\n\n/*<replacement>*/\n'
+ + 'var EElistenerCount = function(emitter, type) {\n'
+ + ' return emitter.listeners(type).length;\n'
+ + '};\n/*</replacement>*/\n'
+ ]
+
+ , eventEmittterListenerCountReplacement = [
+ /(EE\.listenerCount)/g
+ , 'EElistenerCount'
+ ]
+
+ , constReplacement = [
+ /const/g
+ , 'var'
+ ]
+
+ , bufferIsEncodingReplacement = [
+ /Buffer.isEncoding\((\w+)\)/
+ , '([\'hex\', \'utf8\', \'utf-8\', \'ascii\', \'binary\', \'base64\',\n'
+ + '\'ucs2\', \'ucs-2\',\'utf16le\', \'utf-16le\', \'raw\']\n'
+ + '.indexOf(($1 + \'\').toLowerCase()) > -1)'
+ ]
+
+ , requireStreamReplacement = [
+ /var Stream = require\('stream'\);/
+ , '\n\n/*<replacement>*/\n'
+ + 'var Stream;\n(function (){try{\n'
+ + ' Stream = require(\'st\' + \'ream\');\n'
+ + '}catch(_){}finally{\n'
+ + ' if (!Stream)\n'
+ + ' Stream = require(\'events\').EventEmitter;\n'
+ + '}}())'
+ + '\n/*</replacement>*/\n'
+ ]
+
+ , isBufferReplacement = [
+ /(\w+) instanceof Buffer/g
+ , 'Buffer.isBuffer($1)'
+ ]
+
+ , processNextTickImport = [
+ headRegexp
+ , '$1\n\n/*<replacement>*/\nvar processNextTick = require(\'process-nextick-args\');\n/*</replacement>*/\n'
+ ]
+
+ , processNextTickReplacement = [
+ /process.nextTick\(/g
+ , 'processNextTick('
+ ]
+
+ , internalUtilReplacement = [
+ /^var internalUtil = require\('internal\/util'\);/m
+ , '\n/*<replacement>*/\nvar internalUtil = {\n deprecate: require(\'util-deprecate\')\n};\n'
+ + '/*</replacement>*/\n'
+ ]
+
+module.exports['_stream_duplex.js'] = [
+ constReplacement
+ , requireReplacement
+ , instanceofReplacement
+ , utilReplacement
+ , stringDecoderReplacement
+ , altForEachImplReplacement
+ , altForEachUseReplacement
+ , objectKeysReplacement
+ , objectKeysDefine
+ , processNextTickImport
+ , processNextTickReplacement
+]
+
+module.exports['_stream_passthrough.js'] = [
+ constReplacement
+ , requireReplacement
+ , instanceofReplacement
+ , utilReplacement
+ , stringDecoderReplacement
+]
+
+module.exports['_stream_readable.js'] = [
+ constReplacement
+ , addDuplexRequire
+ , requireReplacement
+ , instanceofReplacement
+ , bufferReplacement
+ , altForEachImplReplacement
+ , altForEachUseReplacement
+ , altIndexOfImplReplacement
+ , altIndexOfUseReplacement
+ , instanceofReplacement
+ , stringDecoderReplacement
+ , isArrayDefine
+ , isArrayReplacement
+ , debugLogReplacement
+ , utilReplacement
+ , stringDecoderReplacement
+ , eventEmittterReplacement
+ , requireStreamReplacement
+ , isBufferReplacement
+ , processNextTickImport
+ , processNextTickReplacement
+ , eventEmittterListenerCountReplacement
+]
+
+module.exports['_stream_transform.js'] = [
+ constReplacement
+ , requireReplacement
+ , instanceofReplacement
+ , utilReplacement
+ , stringDecoderReplacement
+]
+
+module.exports['_stream_writable.js'] = [
+ constReplacement
+ , addDuplexRequire
+ , requireReplacement
+ , instanceofReplacement
+ , bufferReplacement
+ , utilReplacement
+ , stringDecoderReplacement
+ , debugLogReplacement
+ , deprecateReplacement
+ , objectDefinePropertyReplacement
+ , objectDefinePropertySingReplacement
+ , bufferIsEncodingReplacement
+ , [ /^var assert = require\('assert'\);$/m, '' ]
+ , requireStreamReplacement
+ , isBufferReplacement
+ , processNextTickImport
+ , processNextTickReplacement
+ , internalUtilReplacement
+]
diff --git a/build/package.json b/build/package.json
new file mode 100644
index 0000000..7da9727
--- /dev/null
+++ b/build/package.json
@@ -0,0 +1,12 @@
+{
+ "name": "readable-stream-build",
+ "version": "0.0.0",
+ "description": "",
+ "main": "build.js",
+ "dependencies": {
+ "bl": "~0.6.0",
+ "hyperzip": "0.0.0",
+ "hyperdirect": "0.0.0",
+ "cheerio": "~0.13.1"
+ }
+}
diff --git a/build/test-replacements.js b/build/test-replacements.js
new file mode 100644
index 0000000..94f5dca
--- /dev/null
+++ b/build/test-replacements.js
@@ -0,0 +1,198 @@
+const altForEachImplReplacement = require('./common-replacements').altForEachImplReplacement
+ , altForEachUseReplacement = require('./common-replacements').altForEachUseReplacement
+ , altIndexOfImplReplacement = require('./common-replacements').altIndexOfImplReplacement
+ , altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement
+ , objectKeysDefine =
+ require('./common-replacements').objectKeysDefine
+ , objectKeysReplacement =
+ require('./common-replacements').objectKeysReplacement
+
+module.exports.all = [
+ [
+ /require\(['"]stream['"]\)/g
+ , 'require(\'../../\')'
+ ]
+
+ // some tests need stream.Stream but readable.js doesn't offer that
+ // and we've undone it with the previous replacement
+
+ , [
+ /stream\.Stream|require\('\.\.\/\.\.\/'\)\.Stream/g
+ , 'require(\'stream\').Stream'
+ ]
+
+ , [
+ /require\(['"](_stream_\w+)['"]\)/g
+ , 'require(\'../../lib/$1\')'
+ ]
+
+ , [
+ /Stream.(Readable|Writable|Duplex|Transform|PassThrough)/g
+ , 'require(\'../../\').$1'
+ ]
+
+]
+
+module.exports['test-stream2-basic.js'] = [
+ altForEachImplReplacement
+ , altForEachUseReplacement
+]
+
+module.exports['test-stream2-objects.js'] = [
+ altForEachImplReplacement
+ , altForEachUseReplacement
+]
+
+module.exports['test-stream2-transform.js'] = [
+ altForEachImplReplacement
+ , altForEachUseReplacement
+]
+
+module.exports['test-stream2-writable.js'] = [
+ altForEachImplReplacement
+ , altForEachUseReplacement
+]
+
+module.exports['test-stream-big-packet.js'] = [
+ altIndexOfImplReplacement
+ , altIndexOfUseReplacement
+]
+
+module.exports['common.js'] = [
+ objectKeysDefine
+ , objectKeysReplacement
+ , altForEachImplReplacement
+ , altForEachUseReplacement
+
+ , [
+ /(exports.mustCall[\s\S]*)/m
+ , '$1\n'
+ + 'if (!util._errnoException) {\n'
+ + ' var uv;\n'
+ + ' util._errnoException = function(err, syscall) {\n'
+ + ' if (util.isUndefined(uv)) try { uv = process.binding(\'uv\'); } catch (e) {}\n'
+ + ' var errname = uv ? uv.errname(err) : \'\';\n'
+ + ' var e = new Error(syscall + \' \' + errname);\n'
+ + ' e.code = errname;\n'
+ + ' e.errno = errname;\n'
+ + ' e.syscall = syscall;\n'
+ + ' return e;\n'
+ + ' };\n'
+ + '}\n'
+ ]
+
+ // for streams2 on node 0.11
+ // and dtrace in 0.10
+ , [
+ /^( for \(var x in global\) \{|function leakedGlobals\(\) \{)$/m
+ , ' /*<replacement>*/\n'
+ + ' if (typeof constructor == \'function\')\n'
+ + ' knownGlobals.push(constructor);\n'
+ + ' if (typeof DTRACE_NET_SOCKET_READ == \'function\')\n'
+ + ' knownGlobals.push(DTRACE_NET_SOCKET_READ);\n'
+ + ' if (typeof DTRACE_NET_SOCKET_WRITE == \'function\')\n'
+ + ' knownGlobals.push(DTRACE_NET_SOCKET_WRITE);\n'
+ + ' /*</replacement>*/\n\n$1'
+ ]
+
+ // for node 0.8
+ , [
+ /^/
+ , '/*<replacement>*/'
+ + '\nif (!global.setImmediate) {\n'
+ + ' global.setImmediate = function setImmediate(fn) {\n'
+
+ + ' return setTimeout(fn.bind.apply(fn, arguments), 0);\n'
+ + ' };\n'
+ + '}\n'
+ + 'if (!global.clearImmediate) {\n'
+ + ' global.clearImmediate = function clearImmediate(i) {\n'
+ + ' return clearTimeout(i);\n'
+ + ' };\n'
+ + '}\n'
+ + '/*</replacement>*/\n'
+ ]
+ , [
+ /^if \(global\.ArrayBuffer\) \{([^\}]+)\}$/m
+ , '/*<replacement>*/if (!process.browser) {'
+ + '\nif \(global\.ArrayBuffer\) {$1}\n'
+ + '}/*</replacement>*/\n'
+ ]
+ , [
+ /^Object\.defineProperty\(([\w\W]+?)\}\)\;/mg
+ , '/*<replacement>*/if (!process.browser) {'
+ + '\nObject\.defineProperty($1});\n'
+ + '}/*</replacement>*/\n'
+ ]
+ , [
+ /if \(!process\.send\)/
+ , 'if (!process.send && !process.browser)'
+ ]
+]
+
+// this test has some trouble with the nextTick depth when run
+// to stdout, it's also very noisy so we'll quiet it
+module.exports['test-stream-pipe-multi.js'] = [
+ altForEachImplReplacement
+ , altForEachUseReplacement
+ , [
+ /console\.error/g
+ , '//console.error'
+ ]
+
+ , [
+ /process\.nextTick/g
+ , 'setImmediate'
+ ]
+]
+
+// just noisy
+module.exports['test-stream2-large-read-stall.js'] = [
+ [
+ /console\.error/g
+ , ';false && console.error'
+ ]
+]
+
+module.exports['test-stream-pipe-cleanup.js'] = [
+ [
+ /(function Writable\(\) \{)/
+ , 'if (/^v0\\.8\\./.test(process.version))\n return\n\n$1'
+ ]
+]
+
+module.exports['test-stream2-stderr-sync.js'] = [
+ altForEachImplReplacement
+ , altForEachUseReplacement
+ , [
+ // 'tty_wrap' is too different across node versions.
+ // this bypasses it and replicates a console.error() test
+ /(function child0\(\) \{)/
+ , '$1\n'
+ + ' return console.error(\'child 0\\nfoo\\nbar\\nbaz\');\n'
+ ]
+]
+
+module.exports['test-stream-unshift-read-race.js'] = [
+ [
+ /data\.slice\(pos, pos \+ n\)/g,
+ 'data.slice(pos, Math.min(pos + n, data.length))'
+ ]
+]
+
+module.exports['test-stream-pipe-without-listenerCount.js'] = [
+ [
+ /const r \= new require\(\'stream'\)\.Stream\(\);/,
+ 'var r = new stream({\n'
+ + ' read: noop'
+ + '});'
+ ],
+ [
+ /const w \= new require\('stream'\)\.Stream\(\);/,
+ 'var w = new stream();'
+ ],
+ [
+ /const /g,
+ 'var '
+ ]
+]
diff --git a/doc/stream.markdown b/doc/stream.markdown
new file mode 100644
index 0000000..a7259e1
--- /dev/null
+++ b/doc/stream.markdown
@@ -0,0 +1,1696 @@
+# Stream
+
+ Stability: 2 - Stable
+
+A stream is an abstract interface implemented by various objects in
+Node.js. For example, a [request to an HTTP
+server](https://iojs.org/dist/v4.1.2/doc/api/http.html#http_http_incomingmessage) is a stream, as is
+[stdout][]. Streams are readable, writable, or both. All streams are
+instances of [EventEmitter][].
+
+You can load the Stream base classes by doing `require('stream')`.
+There are base classes provided for [Readable][] streams, [Writable][]
+streams, [Duplex][] streams, and [Transform][] streams.
+
+This document is split up into 3 sections. The first explains the
+parts of the API that you need to be aware of to use streams in your
+programs. If you never implement a streaming API yourself, you can
+stop there.
+
+The second section explains the parts of the API that you need to use
+if you implement custom streams yourself. The API is
+designed to make this easy for you to do.
+
+The third section goes into more depth about how streams work,
+including some of the internal mechanisms and functions that you
+should probably not modify unless you definitely know what you are
+doing.
+
+
+## API for Stream Consumers
+
+<!--type=misc-->
+
+Streams can be either [Readable][], [Writable][], or both ([Duplex][]).
+
+All streams are EventEmitters, but they also have other custom methods
+and properties depending on whether they are Readable, Writable, or
+Duplex.
+
+If a stream is both Readable and Writable, then it implements all of
+the methods and events below. So, a [Duplex][] or [Transform][] stream is
+fully described by this API, though their implementation may be
+somewhat different.
+
+It is not necessary to implement Stream interfaces in order to consume
+streams in your programs. If you **are** implementing streaming
+interfaces in your own program, please also refer to
+[API for Stream Implementors][] below.
+
+Almost all Node.js programs, no matter how simple, use Streams in some
+way. Here is an example of using Streams in a Node.js program:
+
+```javascript
+var http = require('http');
+
+var server = http.createServer(function (req, res) {
+ // req is an http.IncomingMessage, which is a Readable Stream
+ // res is an http.ServerResponse, which is a Writable Stream
+
+ var body = '';
+ // we want to get the data as utf8 strings
+ // If you don't set an encoding, then you'll get Buffer objects
+ req.setEncoding('utf8');
+
+ // Readable streams emit 'data' events once a listener is added
+ req.on('data', function (chunk) {
+ body += chunk;
+ });
+
+ // the 'end' event tells you that you have the entire body
+ req.on('end', function () {
+ try {
+ var data = JSON.parse(body);
+ } catch (er) {
+ // uh oh! bad json!
+ res.statusCode = 400;
+ return res.end('error: ' + er.message);
+ }
+
+ // write back something interesting to the user:
+ res.write(typeof data);
+ res.end();
+ });
+});
+
+server.listen(1337);
+
+// $ curl localhost:1337 -d '{}'
+// object
+// $ curl localhost:1337 -d '"foo"'
+// string
+// $ curl localhost:1337 -d 'not json'
+// error: Unexpected token o
+```
+
+### Class: stream.Readable
+
+<!--type=class-->
+
+The Readable stream interface is the abstraction for a *source* of
+data that you are reading from. In other words, data comes *out* of a
+Readable stream.
+
+A Readable stream will not start emitting data until you indicate that
+you are ready to receive it.
+
+Readable streams have two "modes": a **flowing mode** and a **paused
+mode**. When in flowing mode, data is read from the underlying system
+and provided to your program as fast as possible. In paused mode, you
+must explicitly call `stream.read()` to get chunks of data out.
+Streams start out in paused mode.
+
+**Note**: If no data event handlers are attached, and there are no
+[`pipe()`][] destinations, and the stream is switched into flowing
+mode, then data will be lost.
+
+You can switch to flowing mode by doing any of the following:
+
+* Adding a [`'data'` event][] handler to listen for data.
+* Calling the [`resume()`][] method to explicitly open the flow.
+* Calling the [`pipe()`][] method to send the data to a [Writable][].
+
+You can switch back to paused mode by doing either of the following:
+
+* If there are no pipe destinations, by calling the [`pause()`][]
+ method.
+* If there are pipe destinations, by removing any [`'data'` event][]
+ handlers, and removing all pipe destinations by calling the
+ [`unpipe()`][] method.
+
+Note that, for backwards compatibility reasons, removing `'data'`
+event handlers will **not** automatically pause the stream. Also, if
+there are piped destinations, then calling `pause()` will not
+guarantee that the stream will *remain* paused once those
+destinations drain and ask for more data.
+
+Examples of readable streams include:
+
+* [http responses, on the client](https://iojs.org/dist/v4.1.2/doc/api/http.html#http_http_incomingmessage)
+* [http requests, on the server](https://iojs.org/dist/v4.1.2/doc/api/http.html#http_http_incomingmessage)
+* [fs read streams](https://iojs.org/dist/v4.1.2/doc/api/fs.html#fs_class_fs_readstream)
+* [zlib streams][]
+* [crypto streams][]
+* [tcp sockets][]
+* [child process stdout and stderr][]
+* [process.stdin][]
+
+#### Event: 'readable'
+
+When a chunk of data can be read from the stream, it will emit a
+`'readable'` event.
+
+In some cases, listening for a `'readable'` event will cause some data
+to be read into the internal buffer from the underlying system, if it
+hadn't already.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', function() {
+ // there is some data to read now
+});
+```
+
+Once the internal buffer is drained, a `readable` event will fire
+again when more data is available.
+
+The `readable` event is not emitted in flowing mode, with the sole
+exception of the last one, emitted at end-of-stream.
+
+The 'readable' event indicates that the stream has new information:
+either new data is available or the end of the stream has been reached.
+In the former case, `.read()` will return that data. In the latter case,
+`.read()` will return null. For instance, in the following example, `foo.txt`
+is an empty file:
+
+```javascript
+var fs = require('fs');
+var rr = fs.createReadStream('foo.txt');
+rr.on('readable', function() {
+ console.log('readable:', rr.read());
+});
+rr.on('end', function() {
+ console.log('end');
+});
+```
+
+The output of running this script is:
+
+```
+bash-3.2$ node test.js
+readable: null
+end
+```
+
+#### Event: 'data'
+
+* `chunk` {Buffer | String} The chunk of data.
+
+Attaching a `data` event listener to a stream that has not been
+explicitly paused will switch the stream into flowing mode. Data will
+then be passed as soon as it is available.
+
+If you just want to get all the data out of the stream as fast as
+possible, this is the best way to do so.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+});
+```
+
+#### Event: 'end'
+
+This event fires when there will be no more data to read.
+
+Note that the `end` event **will not fire** unless the data is
+completely consumed. This can be done by switching into flowing mode,
+or by calling `read()` repeatedly until you get to the end.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+});
+readable.on('end', function() {
+ console.log('there will be no more data.');
+});
+```
+
+#### Event: 'close'
+
+Emitted when the stream and any of its underlying resources (a file
+descriptor, for example) have been closed. The event indicates that
+no more events will be emitted, and no further computation will occur.
+
+Not all streams will emit the 'close' event.
+
+#### Event: 'error'
+
+* {Error Object}
+
+Emitted if there was an error receiving data.
+
+#### readable.read([size])
+
+* `size` {Number} Optional argument to specify how much data to read.
+* Return {String | Buffer | null}
+
+The `read()` method pulls some data out of the internal buffer and
+returns it. If there is no data available, then it will return
+`null`.
+
+If you pass in a `size` argument, then it will return that many
+bytes. If `size` bytes are not available, then it will return `null`,
+unless we've ended, in which case it will return the data remaining
+in the buffer.
+
+If you do not specify a `size` argument, then it will return all the
+data in the internal buffer.
+
+This method should only be called in paused mode. In flowing mode,
+this method is called automatically until the internal buffer is
+drained.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = readable.read())) {
+ console.log('got %d bytes of data', chunk.length);
+ }
+});
+```
+
+If this method returns a data chunk, then it will also trigger the
+emission of a [`'data'` event][].
+
+Note that calling `readable.read([size])` after the `end` event has been
+triggered will return `null`. No runtime error will be raised.
+
+#### readable.setEncoding(encoding)
+
+* `encoding` {String} The encoding to use.
+* Return: `this`
+
+Call this function to cause the stream to return strings of the
+specified encoding instead of Buffer objects. For example, if you do
+`readable.setEncoding('utf8')`, then the output data will be
+interpreted as UTF-8 data, and returned as strings. If you do
+`readable.setEncoding('hex')`, then the data will be encoded in
+hexadecimal string format.
+
+This properly handles multi-byte characters that would otherwise be
+potentially mangled if you simply pulled the Buffers directly and
+called `buf.toString(encoding)` on them. If you want to read the data
+as strings, always use this method.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.setEncoding('utf8');
+readable.on('data', function(chunk) {
+ assert.equal(typeof chunk, 'string');
+ console.log('got %d characters of string data', chunk.length);
+});
+```
+
+#### readable.resume()
+
+* Return: `this`
+
+This method will cause the readable stream to resume emitting `data`
+events.
+
+This method will switch the stream into flowing mode. If you do *not*
+want to consume the data from a stream, but you *do* want to get to
+its `end` event, you can call [`readable.resume()`][] to open the flow of
+data.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.resume();
+readable.on('end', function() {
+ console.log('got to the end, but did not read anything');
+});
+```
+
+#### readable.pause()
+
+* Return: `this`
+
+This method will cause a stream in flowing mode to stop emitting
+`data` events, switching out of flowing mode. Any data that becomes
+available will remain in the internal buffer.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+ readable.pause();
+ console.log('there will be no more data for 1 second');
+ setTimeout(function() {
+ console.log('now data will start flowing again');
+ readable.resume();
+ }, 1000);
+});
+```
+
+#### readable.isPaused()
+
+* Return: `Boolean`
+
+This method returns whether or not the `readable` has been **explicitly**
+paused by client code (using `readable.pause()` without a corresponding
+`readable.resume()`).
+
+```javascript
+var readable = new stream.Readable
+
+readable.isPaused() // === false
+readable.pause()
+readable.isPaused() // === true
+readable.resume()
+readable.isPaused() // === false
+```
+
+#### readable.pipe(destination[, options])
+
+* `destination` {[Writable][] Stream} The destination for writing data
+* `options` {Object} Pipe options
+ * `end` {Boolean} End the writer when the reader ends. Default = `true`
+
+This method pulls all the data out of a readable stream, and writes it
+to the supplied destination, automatically managing the flow so that
+the destination is not overwhelmed by a fast readable stream.
+
+Multiple destinations can be piped to safely.
+
+```javascript
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt'
+readable.pipe(writable);
+```
+
+This function returns the destination stream, so you can set up pipe
+chains like so:
+
+```javascript
+var r = fs.createReadStream('file.txt');
+var z = zlib.createGzip();
+var w = fs.createWriteStream('file.txt.gz');
+r.pipe(z).pipe(w);
+```
+
+For example, emulating the Unix `cat` command:
+
+```javascript
+process.stdin.pipe(process.stdout);
+```
+
+By default [`end()`][] is called on the destination when the source stream
+emits `end`, so that `destination` is no longer writable. Pass `{ end:
+false }` as `options` to keep the destination stream open.
+
+This keeps `writer` open so that "Goodbye" can be written at the
+end.
+
+```javascript
+reader.pipe(writer, { end: false });
+reader.on('end', function() {
+ writer.end('Goodbye\n');
+});
+```
+
+Note that `process.stderr` and `process.stdout` are never closed until
+the process exits, regardless of the specified options.
+
+#### readable.unpipe([destination])
+
+* `destination` {[Writable][] Stream} Optional specific stream to unpipe
+
+This method will remove the hooks set up for a previous `pipe()` call.
+
+If the destination is not specified, then all pipes are removed.
+
+If the destination is specified, but no pipe is set up for it, then
+this is a no-op.
+
+```javascript
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt',
+// but only for the first second
+readable.pipe(writable);
+setTimeout(function() {
+ console.log('stop writing to file.txt');
+ readable.unpipe(writable);
+ console.log('manually close the file stream');
+ writable.end();
+}, 1000);
+```
+
+#### readable.unshift(chunk)
+
+* `chunk` {Buffer | String} Chunk of data to unshift onto the read queue
+
+This is useful in certain cases where a stream is being consumed by a
+parser, which needs to "un-consume" some data that it has
+optimistically pulled out of the source, so that the stream can be
+passed on to some other party.
+
+Note that `stream.unshift(chunk)` cannot be called after the `end` event
+has been triggered; a runtime error will be raised.
+
+If you find that you must often call `stream.unshift(chunk)` in your
+programs, consider implementing a [Transform][] stream instead. (See API
+for Stream Implementors, below.)
+
+```javascript
+// Pull off a header delimited by \n\n
+// use unshift() if we get too much
+// Call the callback with (error, header, stream)
+var StringDecoder = require('string_decoder').StringDecoder;
+function parseHeader(stream, callback) {
+ stream.on('error', callback);
+ stream.on('readable', onReadable);
+ var decoder = new StringDecoder('utf8');
+ var header = '';
+ function onReadable() {
+ var chunk;
+ while (null !== (chunk = stream.read())) {
+ var str = decoder.write(chunk);
+ if (str.match(/\n\n/)) {
+ // found the header boundary
+ var split = str.split(/\n\n/);
+ header += split.shift();
+ var remaining = split.join('\n\n');
+ var buf = new Buffer(remaining, 'utf8');
+ if (buf.length)
+ stream.unshift(buf);
+ stream.removeListener('error', callback);
+ stream.removeListener('readable', onReadable);
+ // now the body of the message can be read from the stream.
+ callback(null, header, stream);
+ } else {
+ // still reading the header.
+ header += str;
+ }
+ }
+ }
+}
+```
+Note that, unlike `stream.push(chunk)`, `stream.unshift(chunk)` will not
+end the reading process by resetting the internal reading state of the
+stream. This can cause unexpected results if `unshift` is called during a
+read (i.e. from within a `_read` implementation on a custom stream). Following
+the call to `unshift` with an immediate `stream.push('')` will reset the
+reading state appropriately, however it is best to simply avoid calling
+`unshift` while in the process of performing a read.
+
+#### readable.wrap(stream)
+
+* `stream` {Stream} An "old style" readable stream
+
+Versions of Node.js prior to v0.10 had streams that did not implement the
+entire Streams API as it is today. (See "Compatibility" below for
+more information.)
+
+If you are using an older Node.js library that emits `'data'` events and
+has a [`pause()`][] method that is advisory only, then you can use the
+`wrap()` method to create a [Readable][] stream that uses the old stream
+as its data source.
+
+You will very rarely ever need to call this function, but it exists
+as a convenience for interacting with old Node.js programs and libraries.
+
+For example:
+
+```javascript
+var OldReader = require('./old-api-module.js').OldReader;
+var oreader = new OldReader;
+var Readable = require('stream').Readable;
+var myReader = new Readable().wrap(oreader);
+
+myReader.on('readable', function() {
+ myReader.read(); // etc.
+});
+```
+
+
+### Class: stream.Writable
+
+<!--type=class-->
+
+The Writable stream interface is an abstraction for a *destination*
+that you are writing data *to*.
+
+Examples of writable streams include:
+
+* [http requests, on the client](https://iojs.org/dist/v4.1.2/doc/api/http.html#http_class_http_clientrequest)
+* [http responses, on the server](https://iojs.org/dist/v4.1.2/doc/api/http.html#http_class_http_serverresponse)
+* [fs write streams](https://iojs.org/dist/v4.1.2/doc/api/fs.html#fs_class_fs_writestream)
+* [zlib streams][]
+* [crypto streams][]
+* [tcp sockets][]
+* [child process stdin](https://iojs.org/dist/v4.1.2/doc/api/child_process.html#child_process_child_stdin)
+* [process.stdout][], [process.stderr][]
+
+#### writable.write(chunk[, encoding][, callback])
+
+* `chunk` {String | Buffer} The data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Callback for when this chunk of data is flushed
+* Returns: {Boolean} True if the data was handled completely.
+
+This method writes some data to the underlying system, and calls the
+supplied callback once the data has been fully handled.
+
+The return value indicates if you should continue writing right now.
+If the data had to be buffered internally, then it will return
+`false`. Otherwise, it will return `true`.
+
+This return value is strictly advisory. You MAY continue to write,
+even if it returns `false`. However, writes will be buffered in
+memory, so it is best not to do this excessively. Instead, wait for
+the `drain` event before writing more data.
+
+#### Event: 'drain'
+
+If a [`writable.write(chunk)`][] call returns false, then the `drain`
+event will indicate when it is appropriate to begin writing more data
+to the stream.
+
+```javascript
+// Write the data to the supplied writable stream one million times.
+// Be attentive to back-pressure.
+function writeOneMillionTimes(writer, data, encoding, callback) {
+ var i = 1000000;
+ write();
+ function write() {
+ var ok = true;
+ do {
+ i -= 1;
+ if (i === 0) {
+ // last time!
+ writer.write(data, encoding, callback);
+ } else {
+ // see if we should continue, or wait
+ // don't pass the callback, because we're not done yet.
+ ok = writer.write(data, encoding);
+ }
+ } while (i > 0 && ok);
+ if (i > 0) {
+ // had to stop early!
+ // write some more once it drains
+ writer.once('drain', write);
+ }
+ }
+}
+```
+
+#### writable.cork()
+
+Forces buffering of all writes.
+
+Buffered data will be flushed when either `.uncork()` or `.end()` is called.
+
+#### writable.uncork()
+
+Flushes all data buffered since the last `.cork()` call.
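+
+For example, a minimal sketch (`getWritableStreamSomehow()` is the same
+placeholder used elsewhere in this document):
+
+```javascript
+var writable = getWritableStreamSomehow();
+writable.cork();           // buffer the following writes in memory
+writable.write('hello ');
+writable.write('world\n');
+writable.uncork();         // flush both buffered chunks downstream
+```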
+
+#### writable.setDefaultEncoding(encoding)
+
+* `encoding` {String} The new default encoding
+
+Sets the default encoding for a writable stream.
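+
+For example, a minimal sketch:
+
+```javascript
+var writable = getWritableStreamSomehow();
+writable.setDefaultEncoding('utf8');
+// string chunks written without an explicit encoding
+// are now interpreted as UTF-8
+writable.write('hello\n');
+```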
+
+#### writable.end([chunk][, encoding][, callback])
+
+* `chunk` {String | Buffer} Optional data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Optional callback for when the stream is finished
+
+Call this method when no more data will be written to the stream. If
+supplied, the callback is attached as a listener on the `finish` event.
+
+Calling [`write()`][] after calling [`end()`][] will raise an error.
+
+```javascript
+// write 'hello, ' and then end with 'world!'
+var file = fs.createWriteStream('example.txt');
+file.write('hello, ');
+file.end('world!');
+// writing more now is not allowed!
+```
+
+#### Event: 'finish'
+
+When the [`end()`][] method has been called, and all data has been flushed
+to the underlying system, this event is emitted.
+
+```javascript
+var writer = getWritableStreamSomehow();
+for (var i = 0; i < 100; i ++) {
+ writer.write('hello, #' + i + '!\n');
+}
+writer.end('this is the end\n');
+writer.on('finish', function() {
+ console.error('all writes are now complete.');
+});
+```
+
+#### Event: 'pipe'
+
+* `src` {[Readable][] Stream} source stream that is piping to this writable
+
+This is emitted whenever the `pipe()` method is called on a readable
+stream, adding this writable to its set of destinations.
+
+```javascript
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('pipe', function(src) {
+ console.error('something is piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+```
+
+#### Event: 'unpipe'
+
+* `src` {[Readable][] Stream} The source stream that [unpiped][] this writable
+
+This is emitted whenever the [`unpipe()`][] method is called on a
+readable stream, removing this writable from its set of destinations.
+
+```javascript
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('unpipe', function(src) {
+ console.error('something has stopped piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+reader.unpipe(writer);
+```
+
+#### Event: 'error'
+
+* {Error object}
+
+Emitted if there was an error when writing or piping data.
+
+### Class: stream.Duplex
+
+Duplex streams are streams that implement both the [Readable][] and
+[Writable][] interfaces. See above for usage.
+
+Examples of Duplex streams include:
+
+* [tcp sockets][]
+* [zlib streams][]
+* [crypto streams][]
+
+
+### Class: stream.Transform
+
+Transform streams are [Duplex][] streams where the output is in some way
+computed from the input. They implement both the [Readable][] and
+[Writable][] interfaces. See above for usage.
+
+Examples of Transform streams include:
+
+* [zlib streams][]
+* [crypto streams][]
+
+
+## API for Stream Implementors
+
+<!--type=misc-->
+
+To implement any sort of stream, the pattern is the same:
+
+1. Extend the appropriate parent class in your own subclass. (The
+ [`util.inherits`][] method is particularly helpful for this.)
+2. Call the appropriate parent class constructor in your constructor,
+ to be sure that the internal mechanisms are set up properly.
+3. Implement one or more specific methods, as detailed below.
+
+The class to extend and the method(s) to implement depend on the sort
+of stream class you are writing:
+
+<table>
+ <thead>
+ <tr>
+ <th>
+ <p>Use-case</p>
+ </th>
+ <th>
+ <p>Class</p>
+ </th>
+ <th>
+ <p>Method(s) to implement</p>
+ </th>
+ </tr>
+ </thead>
+ <tr>
+ <td>
+ <p>Reading only</p>
+ </td>
+ <td>
+ <p>[Readable](#stream_class_stream_readable_1)</p>
+ </td>
+ <td>
+ <p><code>[_read][]</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Writing only</p>
+ </td>
+ <td>
+ <p>[Writable](#stream_class_stream_writable_1)</p>
+ </td>
+ <td>
+ <p><code>[_write][]</code>, <code>_writev</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Reading and writing</p>
+ </td>
+ <td>
+ <p>[Duplex](#stream_class_stream_duplex_1)</p>
+ </td>
+ <td>
+ <p><code>[_read][]</code>, <code>[_write][]</code>, <code>_writev</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Operate on written data, then read the result</p>
+ </td>
+ <td>
+ <p>[Transform](#stream_class_stream_transform_1)</p>
+ </td>
+ <td>
+ <p><code>_transform</code>, <code>_flush</code></p>
+ </td>
+ </tr>
+</table>
+
+In your implementation code, it is very important to never call the
+methods described in [API for Stream Consumers][] above. Otherwise, you
+can potentially cause adverse side effects in programs that consume
+your streaming interfaces.
+
+### Class: stream.Readable
+
+<!--type=class-->
+
+`stream.Readable` is an abstract class designed to be extended with an
+underlying implementation of the [`_read(size)`][] method.
+
+Please see above under [API for Stream Consumers][] for how to consume
+streams in your programs. What follows is an explanation of how to
+implement Readable streams in your programs.
+
+#### Example: A Counting Stream
+
+<!--type=example-->
+
+This is a basic example of a Readable stream. It emits the numerals
+from 1 to 1,000,000 in ascending order, and then ends.
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+util.inherits(Counter, Readable);
+
+function Counter(opt) {
+ Readable.call(this, opt);
+ this._max = 1000000;
+ this._index = 1;
+}
+
+Counter.prototype._read = function() {
+ var i = this._index++;
+ if (i > this._max)
+ this.push(null);
+ else {
+ var str = '' + i;
+ var buf = new Buffer(str, 'ascii');
+ this.push(buf);
+ }
+};
+```
+
+#### Example: SimpleProtocol v1 (Sub-optimal)
+
+This is similar to the `parseHeader` function described above, but
+implemented as a custom stream. Also, note that this implementation
+does not convert the incoming data to a string.
+
+However, this would be better implemented as a [Transform][] stream. See
+below for a better implementation.
+
+```javascript
+// A parser for a simple data protocol.
+// The "header" is a JSON object, followed by 2 \n characters, and
+// then a message body.
+//
+// NOTE: This can be done more simply as a Transform stream!
+// Using Readable directly for this is sub-optimal. See the
+// alternative example below under the Transform section.
+
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(SimpleProtocol, Readable);
+
+function SimpleProtocol(source, options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(source, options);
+
+ Readable.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+
+ // source is a readable stream, such as a socket or file
+ this._source = source;
+
+ var self = this;
+ source.on('end', function() {
+ self.push(null);
+ });
+
+ // give it a kick whenever the source is readable
+ // read(0) will not consume any bytes
+ source.on('readable', function() {
+ self.read(0);
+ });
+
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._read = function(n) {
+ if (!this._inBody) {
+ var chunk = this._source.read();
+
+ // if the source doesn't have data, we don't have data yet.
+ if (chunk === null)
+ return this.push('');
+
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ this.push('');
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // now, because we got some extra data, unshift the rest
+ // back into the read queue so that our consumer will see it.
+ var b = chunk.slice(split);
+ this.unshift(b);
+ // calling unshift by itself does not reset the reading state
+ // of the stream; since we're inside _read, doing an additional
+ // push('') will reset the state appropriately.
+ this.push('');
+
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+ }
+ } else {
+ // from there on, just provide the data to our consumer.
+ // careful not to push(null), since that would indicate EOF.
+ var chunk = this._source.read();
+ if (chunk) this.push(chunk);
+ }
+};
+
+// Usage:
+// var parser = new SimpleProtocol(source);
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+
+#### new stream.Readable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} The maximum number of bytes to store in
+ the internal buffer before ceasing to read from the underlying
+ resource. Default=16kb, or 16 for `objectMode` streams
+ * `encoding` {String} If specified, then buffers will be decoded to
+ strings using the specified encoding. Default=null
+ * `objectMode` {Boolean} Whether this stream should behave
+ as a stream of objects, meaning that `stream.read(n)` returns
+ a single value instead of a Buffer of size `n`. Default=false
+
+In classes that extend the Readable class, make sure to call the
+Readable constructor so that the buffering settings can be properly
+initialized.
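+
+For example, a minimal sketch (`MyReader` is a hypothetical subclass):
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(MyReader, Readable);
+
+function MyReader(options) {
+  // calling the parent constructor initializes highWaterMark,
+  // encoding and objectMode for this instance
+  Readable.call(this, options);
+}
+```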
+
+#### readable.\_read(size)
+
+* `size` {Number} Number of bytes to read asynchronously
+
+Note: **Implement this method, but do NOT call it directly.**
+
+This method is prefixed with an underscore because it is internal to the
+class that defines it and should only be called by the internal Readable
+class methods. All Readable stream implementations must provide a _read
+method to fetch data from the underlying resource.
+
+When `_read` is called, if data is available from the resource, `_read` should
+start pushing that data into the read queue by calling `this.push(dataChunk)`.
+`_read` should continue reading from the resource and pushing data until `push`
+returns `false`, at which point it should stop reading from the resource. Only
+when `_read` is called again after it has stopped should it start reading
+more data from the resource and pushing that data onto the queue.
+
+Note: once the `_read()` method is called, it will not be called again until
+the `push` method is called.
+
+The `size` argument is advisory. Implementations where a "read" is a
+single call that returns data can use this to know how much data to
+fetch. Implementations where that is not relevant, such as TCP or
+TLS, may ignore this argument, and simply provide data whenever it
+becomes available. There is no need, for example, to "wait" until
+`size` bytes are available before calling [`stream.push(chunk)`][].
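+
+For instance, here is a minimal sketch of a hypothetical `Counter` stream
+that respects the `push` return value (the class name and the limit of
+1000 are illustrative assumptions, not part of the API):
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(Counter, Readable);
+
+function Counter(options) {
+  Readable.call(this, options); // initialize buffering settings
+  this._index = 0; // state standing in for an underlying resource
+}
+
+Counter.prototype._read = function(size) {
+  // keep pushing until push() returns false, or the data runs out
+  while (this._index < 1000) {
+    var chunk = new Buffer(String(this._index++));
+    if (!this.push(chunk))
+      return; // stop; _read will be called again when more data is wanted
+  }
+  this.push(null); // no more data: signal EOF
+};
+```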
+
+#### readable.push(chunk[, encoding])
+
+* `chunk` {Buffer | null | String} Chunk of data to push into the read queue
+* `encoding` {String} Encoding of String chunks. Must be a valid
+ Buffer encoding, such as `'utf8'` or `'ascii'`
+* return {Boolean} Whether or not more pushes should be performed
+
+Note: **This method should be called by Readable implementors, NOT
+by consumers of Readable streams.**
+
+If a value other than `null` is passed, the `push()` method adds a chunk of data
+into the queue for subsequent stream processors to consume. If `null` is
+passed, it signals the end of the stream (EOF), after which no more data
+can be written.
+
+The data added with `push` can be pulled out by calling the `read()` method
+when the `'readable'` event fires.
+
+This API is designed to be as flexible as possible. For example,
+you may be wrapping a lower-level source which has some sort of
+pause/resume mechanism, and a data callback. In those cases, you
+could wrap the low-level source object by doing something like this:
+
+```javascript
+// source is an object with readStop() and readStart() methods,
+// and an `ondata` member that gets called when it has data, and
+// an `onend` member that gets called when the data is over.
+
+util.inherits(SourceWrapper, Readable);
+
+function SourceWrapper(options) {
+ Readable.call(this, options);
+
+ this._source = getLowlevelSourceObject();
+ var self = this;
+
+ // Every time there's data, we push it into the internal buffer.
+ this._source.ondata = function(chunk) {
+ // if push() returns false, then we need to stop reading from source
+ if (!self.push(chunk))
+ self._source.readStop();
+ };
+
+ // When the source ends, we push the EOF-signaling `null` chunk
+ this._source.onend = function() {
+ self.push(null);
+ };
+}
+
+// _read will be called when the stream wants to pull more data in
+// the advisory size argument is ignored in this case.
+SourceWrapper.prototype._read = function(size) {
+ this._source.readStart();
+};
+```
+
+
+### Class: stream.Writable
+
+<!--type=class-->
+
+`stream.Writable` is an abstract class designed to be extended with an
+underlying implementation of the [`_write(chunk, encoding, callback)`][] method.
+
+Please see above under [API for Stream Consumers][] for how to consume
+writable streams in your programs. What follows is an explanation of
+how to implement Writable streams.
+
+#### new stream.Writable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} Buffer level when [`write()`][] starts
+ returning false. Default=16KB (16384 bytes), or 16 for `objectMode` streams
+ * `decodeStrings` {Boolean} Whether or not to decode strings into
+ Buffers before passing them to [`_write()`][]. Default=true
+ * `objectMode` {Boolean} Whether or not `write(anyObj)` is
+ a valid operation. If set, you can write arbitrary data instead
+ of only `Buffer` / `String` data. Default=false
+
+In classes that extend the Writable class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### writable.\_write(chunk, encoding, callback)
+
+* `chunk` {Buffer | String} The chunk to be written. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+ encoding type. If the chunk is a buffer, then this is the special
+ value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunk.
+
+All Writable stream implementations must provide a [`_write()`][]
+method to send data to the underlying resource.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+Call the callback using the standard `callback(error)` pattern to
+signal that the write completed successfully or with an error.
+
+If the `decodeStrings` option is set to `false` in the constructor, then
+`chunk` may be a string rather than a Buffer, and `encoding` will
+indicate the sort of string that it is. This is to support
+implementations that have an optimized handling for certain string
+data encodings. If you do not explicitly set the `decodeStrings`
+option to `false`, then you can safely ignore the `encoding` argument,
+and assume that `chunk` will always be a Buffer.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
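+
+As a minimal sketch (the class name and the in-memory destination are
+illustrative assumptions, not part of the API), a Writable subclass might
+look like this:
+
+```javascript
+var Writable = require('stream').Writable;
+var util = require('util');
+
+util.inherits(MemoryWriter, Writable);
+
+function MemoryWriter(options) {
+  Writable.call(this, options);
+  this._chunks = []; // stand-in for a real underlying resource
+}
+
+MemoryWriter.prototype._write = function(chunk, encoding, callback) {
+  // with the default decodeStrings=true, chunk is always a Buffer here
+  this._chunks.push(chunk);
+  callback(); // signal that this chunk has been fully processed
+};
+```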
+
+#### writable.\_writev(chunks, callback)
+
+* `chunks` {Array} The chunks to be written. Each chunk has the following
+ format: `{ chunk: ..., encoding: ... }`.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunks.
+
+Note: **This function MUST NOT be called directly.** It may be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+This function is completely optional to implement. In most cases it is
+unnecessary. If implemented, it will be called with all the chunks
+that are buffered in the write queue.
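+
+A brief sketch, reusing the hypothetical `MemoryWriter` from the `_write`
+example above, might batch all buffered chunks in one call:
+
+```javascript
+MemoryWriter.prototype._writev = function(chunks, callback) {
+  // chunks is an array of { chunk: ..., encoding: ... } objects
+  for (var i = 0; i < chunks.length; i++)
+    this._chunks.push(chunks[i].chunk);
+  callback();
+};
+```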
+
+
+### Class: stream.Duplex
+
+<!--type=class-->
+
+A "duplex" stream is one that is both Readable and Writable, such as a
+TCP socket connection.
+
+Note that `stream.Duplex` is an abstract class designed to be extended
+with an underlying implementation of the `_read(size)` and
+[`_write(chunk, encoding, callback)`][] methods as you would with a
+Readable or Writable stream class.
+
+Since JavaScript doesn't have multiple prototypal inheritance, this
+class prototypally inherits from Readable, and then parasitically from
+Writable. It is thus up to the user to implement both the low-level
+`_read(n)` method as well as the low-level
+[`_write(chunk, encoding, callback)`][] method on extension duplex classes.
+
+#### new stream.Duplex([options])
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors. Also has the following fields:
+ * `allowHalfOpen` {Boolean} Default=true. If set to `false`, then
+ the stream will automatically end the readable side when the
+ writable side ends and vice versa.
+ * `readableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ for readable side of the stream. Has no effect if `objectMode`
+ is `true`.
+ * `writableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ for writable side of the stream. Has no effect if `objectMode`
+ is `true`.
+
+In classes that extend the Duplex class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
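+
+As an illustrative sketch (the class name and echoing behavior are
+assumptions), a Duplex subclass simply supplies both halves:
+
+```javascript
+var Duplex = require('stream').Duplex;
+var util = require('util');
+
+util.inherits(Loopback, Duplex);
+
+// a toy duplex stream: whatever is written becomes readable
+function Loopback(options) {
+  Duplex.call(this, options);
+}
+
+Loopback.prototype._write = function(chunk, encoding, callback) {
+  this.push(chunk); // hand the written data to the readable side
+  callback();
+};
+
+Loopback.prototype._read = function(n) {
+  // nothing to do here: data arrives via _write
+};
+```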
+
+
+### Class: stream.Transform
+
+A "transform" stream is a duplex stream where the output is causally
+connected in some way to the input, such as a [zlib][] stream or a
+[crypto][] stream.
+
+There is no requirement that the output be the same size as the input,
+arrive in the same number of chunks, or arrive at the same time. For example, a
+Hash stream will only ever have a single chunk of output, which is
+provided when the input is ended. A zlib stream will produce output
+that is either much smaller or much larger than its input.
+
+Rather than implement the [`_read()`][] and [`_write()`][] methods, Transform
+classes must implement the `_transform()` method, and may optionally
+also implement the `_flush()` method. (See below.)
+
+#### new stream.Transform([options])
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors.
+
+In classes that extend the Transform class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### transform.\_transform(chunk, encoding, callback)
+
+* `chunk` {Buffer | String} The chunk to be transformed. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+ encoding type. If the chunk is a buffer, then this is the special
+ value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument and data) when you are done processing the supplied chunk.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Transform
+class methods only.
+
+All Transform stream implementations must provide a `_transform`
+method to accept input and produce output.
+
+`_transform` should do whatever has to be done in this specific
+Transform class to handle the bytes being written, and pass them off
+to the readable portion of the interface. Do asynchronous I/O,
+process things, and so on.
+
+Call `transform.push(outputChunk)` zero or more times to generate output
+from this input chunk, depending on how much data you want to output
+as a result of this chunk.
+
+Call the callback function only when the current chunk is completely
+consumed. Note that there may or may not be output as a result of any
+particular input chunk. If you supply a second argument to the callback,
+it will be passed to the `push` method. In other words, the following are
+equivalent:
+
+```javascript
+transform.prototype._transform = function (data, encoding, callback) {
+ this.push(data);
+ callback();
+};
+
+transform.prototype._transform = function (data, encoding, callback) {
+ callback(null, data);
+};
+```
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
+#### transform.\_flush(callback)
+
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done flushing any remaining data.
+
+Note: **This function MUST NOT be called directly.** It MAY be implemented
+by child classes, and if so, will be called by the internal Transform
+class methods only.
+
+In some cases, your transform operation may need to emit a bit more
+data at the end of the stream. For example, a `Zlib` compression
+stream will store up some internal state so that it can optimally
+compress the output. At the end, however, it needs to do the best it
+can with what is left, so that the data will be complete.
+
+In those cases, you can implement a `_flush` method, which will be
+called at the very end, after all the written data is consumed, but
+before emitting `end` to signal the end of the readable side. Just
+like with `_transform`, call `transform.push(chunk)` zero or more
+times, as appropriate, and call `callback` when the flush operation is
+complete.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
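+
+A brief hedged sketch (the buffering behavior is an illustrative
+assumption): a Transform that emits everything it has collected once the
+writable side ends.
+
+```javascript
+var Transform = require('stream').Transform;
+var util = require('util');
+
+util.inherits(Joiner, Transform);
+
+// collects all input and emits it as a single chunk at the end
+function Joiner(options) {
+  Transform.call(this, options);
+  this._pieces = [];
+}
+
+Joiner.prototype._transform = function(chunk, encoding, callback) {
+  this._pieces.push(chunk); // no output yet, just buffer the chunk
+  callback();
+};
+
+Joiner.prototype._flush = function(callback) {
+  // called after all written data is consumed, before 'end' is emitted
+  this.push(Buffer.concat(this._pieces));
+  callback();
+};
+```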
+
+#### Events: 'finish' and 'end'
+
+The [`finish`][] and [`end`][] events are from the parent Writable
+and Readable classes, respectively. The `finish` event is fired after
+`.end()` is called and all chunks have been processed by `_transform`;
+`end` is fired after all data has been output, which is after the callback
+in `_flush` has been called.
+
+#### Example: `SimpleProtocol` parser v2
+
+The example above of a simple protocol parser can be implemented
+simply by using the higher-level [Transform][] stream class, similar to
+the `parseHeader` and `SimpleProtocol v1` examples above.
+
+In this example, rather than providing the input as an argument, it
+would be piped into the parser, which is a more idiomatic Node.js stream
+approach.
+
+```javascript
+var util = require('util');
+var Transform = require('stream').Transform;
+util.inherits(SimpleProtocol, Transform);
+
+function SimpleProtocol(options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(options);
+
+ Transform.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._transform = function(chunk, encoding, done) {
+ if (!this._inBody) {
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+
+ // now, because we got some extra data, emit this first.
+ this.push(chunk.slice(split));
+ }
+ } else {
+ // from there on, just provide the data to our consumer as-is.
+ this.push(chunk);
+ }
+ done();
+};
+
+// Usage:
+// var parser = new SimpleProtocol();
+// source.pipe(parser)
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+
+### Class: stream.PassThrough
+
+This is a trivial implementation of a [Transform][] stream that simply
+passes the input bytes across to the output. Its purpose is mainly
+for examples and testing, but there are occasionally use cases where
+it can come in handy as a building block for novel sorts of streams.
+
+
+## Simplified Constructor API
+
+<!--type=misc-->
+
+In simple cases, a stream can now be constructed without inheritance.
+
+This can be done by passing the appropriate methods as constructor options:
+
+Examples:
+
+### Readable
+```javascript
+var readable = new stream.Readable({
+ read: function(n) {
+ // sets this._read under the hood
+ }
+});
+```
+
+### Writable
+```javascript
+var writable = new stream.Writable({
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+ }
+});
+
+// or
+
+var writable = new stream.Writable({
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+ }
+});
+```
+
+### Duplex
+```javascript
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+ },
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+ }
+});
+
+// or
+
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+ },
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+ }
+});
+```
+
+### Transform
+```javascript
+var transform = new stream.Transform({
+ transform: function(chunk, encoding, next) {
+ // sets this._transform under the hood
+ },
+ flush: function(done) {
+ // sets this._flush under the hood
+ }
+});
+```
+
+## Streams: Under the Hood
+
+<!--type=misc-->
+
+### Buffering
+
+<!--type=misc-->
+
+Both Writable and Readable streams will buffer data on an internal
+object which can be retrieved from `_writableState.getBuffer()` or
+`_readableState.buffer`, respectively.
+
+The amount of data that will potentially be buffered depends on the
+`highWaterMark` option which is passed into the constructor.
+
+Buffering in Readable streams happens when the implementation calls
+[`stream.push(chunk)`][]. If the consumer of the Stream does not call
+`stream.read()`, then the data will sit in the internal queue until it
+is consumed.
+
+Buffering in Writable streams happens when the user calls
+[`stream.write(chunk)`][] repeatedly, even when `write()` returns `false`.
+
+The purpose of streams, especially with the `pipe()` method, is to
+limit the buffering of data to acceptable levels, so that sources and
+destinations of varying speed will not overwhelm the available memory.
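+
+As an illustrative consumer-side sketch (the helper function is an
+assumption, not part of the API), respecting the write buffer means
+pausing when `write()` returns `false` and resuming on `'drain'`:
+
+```javascript
+function writeMany(writable, chunks) {
+  var i = 0;
+  (function next() {
+    while (i < chunks.length) {
+      // write() returns false once the highWaterMark is reached
+      if (!writable.write(chunks[i++]))
+        return writable.once('drain', next); // resume when buffer empties
+    }
+    writable.end();
+  })();
+}
+```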
+
+### `stream.read(0)`
+
+There are some cases where you want to trigger a refresh of the
+underlying readable stream mechanisms, without actually consuming any
+data. In that case, you can call `stream.read(0)`, which will always
+return null.
+
+If the internal read buffer is below the `highWaterMark`, and the
+stream is not currently reading, then calling `read(0)` will trigger
+a low-level `_read` call.
+
+There is almost never a need to do this. However, you will see some
+cases in Node.js's internals where this is done, particularly in the
+Readable stream class internals.
+
+### `stream.push('')`
+
+Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
+interesting side effect. Because it *is* a call to
+[`stream.push()`][], it will end the `reading` process. However, it
+does *not* add any data to the readable buffer, so there's nothing for
+a user to consume.
+
+Very rarely, there are cases where you have no data to provide now,
+but the consumer of your stream (or, perhaps, another bit of your own
+code) will know when to check again, by calling `stream.read(0)`. In
+those cases, you *may* call `stream.push('')`.
+
+So far, the only use case for this functionality is in the
+[tls.CryptoStream][] class, which is deprecated in Node.js/io.js v1.0. If you
+find that you have to use `stream.push('')`, please consider another
+approach, because it almost certainly indicates that something is
+horribly wrong.
+
+### Compatibility with Older Node.js Versions
+
+<!--type=misc-->
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for you to call the `read()` method, `'data'`
+ events would start emitting immediately. If you needed to do some
+ I/O to decide how to handle data, then you had to store the chunks
+ in some kind of buffer so that they would not be lost.
+* The [`pause()`][] method was advisory, rather than guaranteed. This
+ meant that you still had to be prepared to receive `'data'` events
+ even when the stream was in a paused state.
+
+In Node.js v0.10, the Readable class described above was added.
+For backwards compatibility with older Node.js programs, Readable streams
+switch into "flowing mode" when a `'data'` event handler is added, or
+when the [`resume()`][] method is called. The effect is that, even if
+you are not using the new `read()` method and `'readable'` event, you
+no longer have to worry about losing `'data'` chunks.
+
+Most programs will continue to function normally. However, this
+introduces an edge case in the following conditions:
+
+* No [`'data'` event][] handler is added.
+* The [`resume()`][] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```javascript
+// WARNING! BROKEN!
+net.createServer(function(socket) {
+
+ // we add an 'end' event handler, but never consume the data
+ socket.on('end', function() {
+ // It will never get here.
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would be
+simply discarded. However, in Node.js v0.10 and beyond,
+the socket will remain paused forever.
+
+The workaround in this situation is to call the `resume()` method to
+start the flow of data:
+
+```javascript
+// Workaround
+net.createServer(function(socket) {
+
+ socket.on('end', function() {
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+ // start the flow of data, discarding it.
+ socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+`wrap()` method.
+
+
+### Object Mode
+
+<!--type=misc-->
+
+Normally, Streams operate on Strings and Buffers exclusively.
+
+Streams that are in **object mode** can emit generic JavaScript values
+other than Buffers and Strings.
+
+A Readable stream in object mode will always return a single item from
+a call to `stream.read(size)`, regardless of what the size argument
+is.
+
+A Writable stream in object mode will always ignore the `encoding`
+argument to `stream.write(data, encoding)`.
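+
+A tiny hedged sketch of the readable behavior (the values and the no-op
+`read` function are illustrative):
+
+```javascript
+var stream = require('stream');
+
+var objects = new stream.Readable({
+  objectMode: true,
+  read: function() {} // data is pushed manually below
+});
+
+objects.push({ n: 1 });
+objects.push({ n: 2 });
+objects.push(null); // EOF
+
+console.log(objects.read(100)); // { n: 1 } -- the size argument is ignored
+console.log(objects.read()); // { n: 2 }
+```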
+
+The special value `null` still retains its special meaning for object
+mode streams. That is, for object mode readable streams, `null` as a
+return value from `stream.read()` indicates that there is no more
+data, and [`stream.push(null)`][] will signal the end of stream data
+(`EOF`).
+
+No streams in Node.js core are object mode streams. This pattern is only
+used by userland streaming libraries.
+
+You should set `objectMode` in your stream child class constructor on
+the options object. Setting `objectMode` mid-stream is not safe.
+
+For Duplex streams, `objectMode` can be set exclusively for the readable or
+the writable side with `readableObjectMode` and `writableObjectMode`,
+respectively. These options can be used to implement parsers and
+serializers with Transform streams.
+
+```javascript
+var util = require('util');
+var StringDecoder = require('string_decoder').StringDecoder;
+var Transform = require('stream').Transform;
+util.inherits(JSONParseStream, Transform);
+
+// Gets \n-delimited JSON string data, and emits the parsed objects
+function JSONParseStream() {
+ if (!(this instanceof JSONParseStream))
+ return new JSONParseStream();
+
+ Transform.call(this, { readableObjectMode : true });
+
+ this._buffer = '';
+ this._decoder = new StringDecoder('utf8');
+}
+
+JSONParseStream.prototype._transform = function(chunk, encoding, cb) {
+ this._buffer += this._decoder.write(chunk);
+ // split on newlines
+ var lines = this._buffer.split(/\r?\n/);
+ // keep the last partial line buffered
+ this._buffer = lines.pop();
+ for (var l = 0; l < lines.length; l++) {
+ var line = lines[l];
+ try {
+ var obj = JSON.parse(line);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+
+JSONParseStream.prototype._flush = function(cb) {
+ // Just handle any leftover
+ var rem = this._buffer.trim();
+ if (rem) {
+ try {
+ var obj = JSON.parse(rem);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+```
+
+
+[EventEmitter]: https://iojs.org/dist/v4.1.2/doc/api/events.html#events_class_events_eventemitter
+[Object mode]: #stream_object_mode
+[`stream.push(chunk)`]: #stream_readable_push_chunk_encoding
+[`stream.push(null)`]: #stream_readable_push_chunk_encoding
+[`stream.push()`]: #stream_readable_push_chunk_encoding
+[`unpipe()`]: #stream_readable_unpipe_destination
+[unpiped]: #stream_readable_unpipe_destination
+[tcp sockets]: https://iojs.org/dist/v4.1.2/doc/api/net.html#net_class_net_socket
+[zlib streams]: zlib.html
+[zlib]: zlib.html
+[crypto streams]: crypto.html
+[crypto]: crypto.html
+[tls.CryptoStream]: https://iojs.org/dist/v4.1.2/doc/api/tls.html#tls_class_cryptostream
+[process.stdin]: https://iojs.org/dist/v4.1.2/doc/api/process.html#process_process_stdin
+[stdout]: https://iojs.org/dist/v4.1.2/doc/api/process.html#process_process_stdout
+[process.stdout]: https://iojs.org/dist/v4.1.2/doc/api/process.html#process_process_stdout
+[process.stderr]: https://iojs.org/dist/v4.1.2/doc/api/process.html#process_process_stderr
+[child process stdout and stderr]: https://iojs.org/dist/v4.1.2/doc/api/child_process.html#child_process_child_stdout
+[API for Stream Consumers]: #stream_api_for_stream_consumers
+[API for Stream Implementors]: #stream_api_for_stream_implementors
+[Readable]: #stream_class_stream_readable
+[Writable]: #stream_class_stream_writable
+[Duplex]: #stream_class_stream_duplex
+[Transform]: #stream_class_stream_transform
+[`end`]: #stream_event_end
+[`finish`]: #stream_event_finish
+[`_read(size)`]: #stream_readable_read_size_1
+[`_read()`]: #stream_readable_read_size_1
+[_read]: #stream_readable_read_size_1
+[`writable.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
+[`write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback
+[`write()`]: #stream_writable_write_chunk_encoding_callback
+[`stream.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
+[`_write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback_1
+[`_write()`]: #stream_writable_write_chunk_encoding_callback_1
+[_write]: #stream_writable_write_chunk_encoding_callback_1
+[`util.inherits`]: https://iojs.org/dist/v4.1.2/doc/api/util.html#util_util_inherits_constructor_superconstructor
+[`end()`]: #stream_writable_end_chunk_encoding_callback
+[`'data'` event]: #stream_event_data
+[`resume()`]: #stream_readable_resume
+[`readable.resume()`]: #stream_readable_resume
+[`pause()`]: #stream_readable_pause
+[`pipe()`]: #stream_readable_pipe_destination_options
diff --git a/doc/wg-meetings/2015-01-30.md b/doc/wg-meetings/2015-01-30.md
new file mode 100644
index 0000000..83275f1
--- /dev/null
+++ b/doc/wg-meetings/2015-01-30.md
@@ -0,0 +1,60 @@
+# streams WG Meeting 2015-01-30
+
+## Links
+
+* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
+* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
+* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
+
+## Agenda
+
+Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
+
+* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
+* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
+* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
+* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
+
+## Minutes
+
+### adopt a charter
+
+* group: +1's all around
+
+### What versioning scheme should be adopted?
+* group: +1’s 3.0.0
+* domenic+group: pulling in patches from other sources where appropriate
+* mikeal: version independently, suggesting versions for io.js
+* mikeal+domenic: work with TC to notify in advance of changes
+
+### streamline creation of streams
+* sam: streamline creation of streams
+* domenic: nice simple solution posted
+ but, we lose the opportunity to change the model
+ may not be backwards incompatible (double check keys)
+
+ **action item:** domenic will check
+
+### remove implicit flowing of streams on(‘data’)
+* add isFlowing / isPaused
+* mikeal: worrying that we’re documenting polyfill methods – confuses users
+* domenic: more reflective API is probably good, with warning labels for users
+* new section for mad scientists (reflective stream access)
+* calvin: name the “third state”
+* mikeal: maybe borrow the name from whatwg?
+* domenic: we’re missing the “third state”
+* consensus: kind of difficult to name the third state
+* mikeal: figure out differences in states / compat
+* mathias: always flow on data – eliminates third state
+ * explore what it breaks
+
+**action items:**
+* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
+* ask rod/build for infrastructure
+* **chris**: explore the “flow on data” approach
+* add isPaused/isFlowing
+* add new docs section
+* move isPaused to that section
+
+
diff --git a/duplex.js b/duplex.js
new file mode 100644
index 0000000..ca807af
--- /dev/null
+++ b/duplex.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_duplex.js")
diff --git a/examples/CAPSLOCKTYPER.JS b/examples/CAPSLOCKTYPER.JS
new file mode 100644
index 0000000..205a425
--- /dev/null
+++ b/examples/CAPSLOCKTYPER.JS
@@ -0,0 +1,32 @@
+var Transform = require('../transform');
+var inherits = require('util').inherits;
+
+// subclass
+function MyStream () {
+ Transform.call(this, {
+ encoding: 'utf8'
+ });
+}
+inherits(MyStream, Transform);
+
+MyStream.prototype._transform = function (chunk, encoding, callback) {
+ this.push(new Buffer(String(chunk).toUpperCase()));
+ callback();
+};
+
+// use it!
+var s = new MyStream();
+process.stdin.resume();
+process.stdin.pipe(s).pipe(process.stdout);
+if (process.stdin.setRawMode)
+ process.stdin.setRawMode(true);
+process.stdin.on('data', function (c) {
+ c = c.toString();
+ if (c === '\u0003' || c === '\u0004') {
+ process.stdin.pause();
+ s.end();
+ }
+ if (c === '\r')
+ process.stdout.write('\n');
+});
diff --git a/examples/typer.js b/examples/typer.js
new file mode 100644
index 0000000..c16eb6f
--- /dev/null
+++ b/examples/typer.js
@@ -0,0 +1,17 @@
+var fs = require('fs');
+var fst = fs.createReadStream(__filename);
+var Readable = require('../readable.js');
+var rst = new Readable();
+rst.wrap(fst);
+
+rst.on('end', function() {
+ process.stdin.pause();
+});
+
+process.stdin.setRawMode(true);
+process.stdin.on('data', function() {
+ var c = rst.read(3);
+ if (!c) return setTimeout(process.exit, 500);
+ process.stdout.write(c);
+});
+process.stdin.resume();
diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js
new file mode 100644
index 0000000..69558af
--- /dev/null
+++ b/lib/_stream_duplex.js
@@ -0,0 +1,82 @@
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototypal inheritance, this class
+// prototypally inherits from Readable, and then parasitically from
+// Writable.
+
+'use strict';
+
+/*<replacement>*/
+var objectKeys = Object.keys || function (obj) {
+ var keys = [];
+ for (var key in obj) keys.push(key);
+ return keys;
+}
+/*</replacement>*/
+
+
+module.exports = Duplex;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
+
+util.inherits(Duplex, Readable);
+
+var keys = objectKeys(Writable.prototype);
+for (var v = 0; v < keys.length; v++) {
+ var method = keys[v];
+ if (!Duplex.prototype[method])
+ Duplex.prototype[method] = Writable.prototype[method];
+}
+
+function Duplex(options) {
+ if (!(this instanceof Duplex))
+ return new Duplex(options);
+
+ Readable.call(this, options);
+ Writable.call(this, options);
+
+ if (options && options.readable === false)
+ this.readable = false;
+
+ if (options && options.writable === false)
+ this.writable = false;
+
+ this.allowHalfOpen = true;
+ if (options && options.allowHalfOpen === false)
+ this.allowHalfOpen = false;
+
+ this.once('end', onend);
+}
+
+// the no-half-open enforcer
+function onend() {
+ // if we allow half-open state, or if the writable side ended,
+ // then we're ok.
+ if (this.allowHalfOpen || this._writableState.ended)
+ return;
+
+ // no more data can be written.
+ // But allow more writes to happen in this tick.
+ processNextTick(onEndNT, this);
+}
+
+function onEndNT(self) {
+ self.end();
+}
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js
new file mode 100644
index 0000000..bddfdd0
--- /dev/null
+++ b/lib/_stream_passthrough.js
@@ -0,0 +1,27 @@
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+
+'use strict';
+
+module.exports = PassThrough;
+
+var Transform = require('./_stream_transform');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(PassThrough, Transform);
+
+function PassThrough(options) {
+ if (!(this instanceof PassThrough))
+ return new PassThrough(options);
+
+ Transform.call(this, options);
+}
+
+PassThrough.prototype._transform = function(chunk, encoding, cb) {
+ cb(null, chunk);
+};
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
new file mode 100644
index 0000000..b2986f5
--- /dev/null
+++ b/lib/_stream_readable.js
@@ -0,0 +1,961 @@
+'use strict';
+
+module.exports = Readable;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var isArray = require('isarray');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var Buffer = require('buffer').Buffer;
+/*</replacement>*/
+
+Readable.ReadableState = ReadableState;
+
+var EE = require('events');
+
+/*<replacement>*/
+var EElistenerCount = function(emitter, type) {
+ return emitter.listeners(type).length;
+};
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var Stream;
+(function (){try{
+ Stream = require('st' + 'ream');
+}catch(_){}finally{
+ if (!Stream)
+ Stream = require('events').EventEmitter;
+}}())
+/*</replacement>*/
+
+var Buffer = require('buffer').Buffer;
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var debugUtil = require('util');
+var debug;
+if (debugUtil && debugUtil.debuglog) {
+ debug = debugUtil.debuglog('stream');
+} else {
+ debug = function () {};
+}
+/*</replacement>*/
+
+var StringDecoder;
+
+util.inherits(Readable, Stream);
+
+function ReadableState(options, stream) {
+ var Duplex = require('./_stream_duplex');
+
+ options = options || {};
+
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex)
+ this.objectMode = this.objectMode || !!options.readableObjectMode;
+
+ // the point at which it stops calling _read() to fill the buffer
+ // Note: 0 is a valid value, means "don't call _read preemptively ever"
+ var hwm = options.highWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+
+ // cast to ints.
+ this.highWaterMark = ~~this.highWaterMark;
+
+ this.buffer = [];
+ this.length = 0;
+ this.pipes = null;
+ this.pipesCount = 0;
+ this.flowing = null;
+ this.ended = false;
+ this.endEmitted = false;
+ this.reading = false;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, because any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+ this.needReadable = false;
+ this.emittedReadable = false;
+ this.readableListening = false;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // when piping, we only care about 'readable' events that happen
+ // after read()ing all the bytes and not getting any pushback.
+ this.ranOut = false;
+
+ // the number of writers that are awaiting a drain event in .pipe()s
+ this.awaitDrain = 0;
+
+ // if true, a maybeReadMore has been scheduled
+ this.readingMore = false;
+
+ this.decoder = null;
+ this.encoding = null;
+ if (options.encoding) {
+ if (!StringDecoder)
+ StringDecoder = require('string_decoder/').StringDecoder;
+ this.decoder = new StringDecoder(options.encoding);
+ this.encoding = options.encoding;
+ }
+}
+
+function Readable(options) {
+ var Duplex = require('./_stream_duplex');
+
+ if (!(this instanceof Readable))
+ return new Readable(options);
+
+ this._readableState = new ReadableState(options, this);
+
+ // legacy
+ this.readable = true;
+
+ if (options && typeof options.read === 'function')
+ this._read = options.read;
+
+ Stream.call(this);
+}
+
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function(chunk, encoding) {
+ var state = this._readableState;
+
+ if (!state.objectMode && typeof chunk === 'string') {
+ encoding = encoding || state.defaultEncoding;
+ if (encoding !== state.encoding) {
+ chunk = new Buffer(chunk, encoding);
+ encoding = '';
+ }
+ }
+
+ return readableAddChunk(this, state, chunk, encoding, false);
+};
+
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function(chunk) {
+ var state = this._readableState;
+ return readableAddChunk(this, state, chunk, '', true);
+};
+
+Readable.prototype.isPaused = function() {
+ return this._readableState.flowing === false;
+};
+
+function readableAddChunk(stream, state, chunk, encoding, addToFront) {
+ var er = chunkInvalid(state, chunk);
+ if (er) {
+ stream.emit('error', er);
+ } else if (chunk === null) {
+ state.reading = false;
+ onEofChunk(stream, state);
+ } else if (state.objectMode || chunk && chunk.length > 0) {
+ if (state.ended && !addToFront) {
+ var e = new Error('stream.push() after EOF');
+ stream.emit('error', e);
+ } else if (state.endEmitted && addToFront) {
+ var e = new Error('stream.unshift() after end event');
+ stream.emit('error', e);
+ } else {
+ if (state.decoder && !addToFront && !encoding)
+ chunk = state.decoder.write(chunk);
+
+ if (!addToFront)
+ state.reading = false;
+
+ // if we want the data now, just emit it.
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront)
+ state.buffer.unshift(chunk);
+ else
+ state.buffer.push(chunk);
+
+ if (state.needReadable)
+ emitReadable(stream);
+ }
+
+ maybeReadMore(stream, state);
+ }
+ } else if (!addToFront) {
+ state.reading = false;
+ }
+
+ return needMoreData(state);
+}
+
+
+// if it's past the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes. This is to work around cases where hwm=0,
+// such as the repl. Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+ return !state.ended &&
+ (state.needReadable ||
+ state.length < state.highWaterMark ||
+ state.length === 0);
+}
+
+// backwards compatibility.
+Readable.prototype.setEncoding = function(enc) {
+ if (!StringDecoder)
+ StringDecoder = require('string_decoder/').StringDecoder;
+ this._readableState.decoder = new StringDecoder(enc);
+ this._readableState.encoding = enc;
+ return this;
+};
+
+// Don't raise the hwm > 8MB
+var MAX_HWM = 0x800000;
+function computeNewHighWaterMark(n) {
+ if (n >= MAX_HWM) {
+ n = MAX_HWM;
+ } else {
+ // Get the next highest power of 2
+ n--;
+ n |= n >>> 1;
+ n |= n >>> 2;
+ n |= n >>> 4;
+ n |= n >>> 8;
+ n |= n >>> 16;
+ n++;
+ }
+ return n;
+}
+
+function howMuchToRead(n, state) {
+ if (state.length === 0 && state.ended)
+ return 0;
+
+ if (state.objectMode)
+ return n === 0 ? 0 : 1;
+
+ if (n === null || isNaN(n)) {
+ // only flow one buffer at a time
+ if (state.flowing && state.buffer.length)
+ return state.buffer[0].length;
+ else
+ return state.length;
+ }
+
+ if (n <= 0)
+ return 0;
+
+ // If we're asking for more than the target buffer level,
+ // then raise the water mark. Bump up to the next highest
+ // power of 2, to prevent increasing it excessively in tiny
+ // amounts.
+ if (n > state.highWaterMark)
+ state.highWaterMark = computeNewHighWaterMark(n);
+
+ // don't have that much. return null, unless we've ended.
+ if (n > state.length) {
+ if (!state.ended) {
+ state.needReadable = true;
+ return 0;
+ } else {
+ return state.length;
+ }
+ }
+
+ return n;
+}
+
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function(n) {
+ debug('read', n);
+ var state = this._readableState;
+ var nOrig = n;
+
+ if (typeof n !== 'number' || n > 0)
+ state.emittedReadable = false;
+
+ // if we're doing read(0) to trigger a readable event, but we
+ // already have a bunch of data in the buffer, then just trigger
+ // the 'readable' event and move on.
+ if (n === 0 &&
+ state.needReadable &&
+ (state.length >= state.highWaterMark || state.ended)) {
+ debug('read: emitReadable', state.length, state.ended);
+ if (state.length === 0 && state.ended)
+ endReadable(this);
+ else
+ emitReadable(this);
+ return null;
+ }
+
+ n = howMuchToRead(n, state);
+
+ // if we've ended, and we're now clear, then finish it up.
+ if (n === 0 && state.ended) {
+ if (state.length === 0)
+ endReadable(this);
+ return null;
+ }
+
+ // All the actual chunk generation logic needs to be
+ // *below* the call to _read. The reason is that in certain
+ // synthetic stream cases, such as passthrough streams, _read
+ // may be a completely synchronous operation which may change
+ // the state of the read buffer, providing enough data when
+ // before there was *not* enough.
+ //
+ // So, the steps are:
+ // 1. Figure out what the state of things will be after we do
+ // a read from the buffer.
+ //
+ // 2. If that resulting state will trigger a _read, then call _read.
+ // Note that this may be asynchronous, or synchronous. Yes, it is
+ // deeply ugly to write APIs this way, but that still doesn't mean
+ // that the Readable class should behave improperly, as streams are
+ // designed to be sync/async agnostic.
+ // Take note if the _read call is sync or async (ie, if the read call
+ // has returned yet), so that we know whether or not it's safe to emit
+ // 'readable' etc.
+ //
+ // 3. Actually pull the requested chunks out of the buffer and return.
+
+ // if we need a readable event, then we need to do some reading.
+ var doRead = state.needReadable;
+ debug('need readable', doRead);
+
+ // if we currently have less than the highWaterMark, then also read some
+ if (state.length === 0 || state.length - n < state.highWaterMark) {
+ doRead = true;
+ debug('length less than watermark', doRead);
+ }
+
+ // however, if we've ended, then there's no point, and if we're already
+ // reading, then it's unnecessary.
+ if (state.ended || state.reading) {
+ doRead = false;
+ debug('reading or ended', doRead);
+ }
+
+ if (doRead) {
+ debug('do read');
+ state.reading = true;
+ state.sync = true;
+ // if the length is currently zero, then we *need* a readable event.
+ if (state.length === 0)
+ state.needReadable = true;
+ // call internal read method
+ this._read(state.highWaterMark);
+ state.sync = false;
+ }
+
+ // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
+ if (doRead && !state.reading)
+ n = howMuchToRead(nOrig, state);
+
+ var ret;
+ if (n > 0)
+ ret = fromList(n, state);
+ else
+ ret = null;
+
+ if (ret === null) {
+ state.needReadable = true;
+ n = 0;
+ }
+
+ state.length -= n;
+
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (state.length === 0 && !state.ended)
+ state.needReadable = true;
+
+ // If we tried to read() past the EOF, then emit end on the next tick.
+ if (nOrig !== n && state.ended && state.length === 0)
+ endReadable(this);
+
+ if (ret !== null)
+ this.emit('data', ret);
+
+ return ret;
+};
+
+function chunkInvalid(state, chunk) {
+ var er = null;
+ if (!(Buffer.isBuffer(chunk)) &&
+ typeof chunk !== 'string' &&
+ chunk !== null &&
+ chunk !== undefined &&
+ !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ return er;
+}
+
+
+function onEofChunk(stream, state) {
+ if (state.ended) return;
+ if (state.decoder) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) {
+ state.buffer.push(chunk);
+ state.length += state.objectMode ? 1 : chunk.length;
+ }
+ }
+ state.ended = true;
+
+ // emit 'readable' now to make sure it gets picked up.
+ emitReadable(stream);
+}
+
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow. This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+ var state = stream._readableState;
+ state.needReadable = false;
+ if (!state.emittedReadable) {
+ debug('emitReadable', state.flowing);
+ state.emittedReadable = true;
+ if (state.sync)
+ processNextTick(emitReadable_, stream);
+ else
+ emitReadable_(stream);
+ }
+}
+
+function emitReadable_(stream) {
+ debug('emit readable');
+ stream.emit('readable');
+ flow(stream);
+}
+
+
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+ if (!state.readingMore) {
+ state.readingMore = true;
+ processNextTick(maybeReadMore_, stream, state);
+ }
+}
+
+function maybeReadMore_(stream, state) {
+ var len = state.length;
+ while (!state.reading && !state.flowing && !state.ended &&
+ state.length < state.highWaterMark) {
+ debug('maybeReadMore read 0');
+ stream.read(0);
+ if (len === state.length)
+ // didn't get any data, stop spinning.
+ break;
+ else
+ len = state.length;
+ }
+ state.readingMore = false;
+}
+
+// abstract method. to be overridden in specific implementation classes.
+// implementations should call this.push(data) with data <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function(n) {
+ this.emit('error', new Error('not implemented'));
+};
+
+Readable.prototype.pipe = function(dest, pipeOpts) {
+ var src = this;
+ var state = this._readableState;
+
+ switch (state.pipesCount) {
+ case 0:
+ state.pipes = dest;
+ break;
+ case 1:
+ state.pipes = [state.pipes, dest];
+ break;
+ default:
+ state.pipes.push(dest);
+ break;
+ }
+ state.pipesCount += 1;
+ debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
+
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
+ dest !== process.stdout &&
+ dest !== process.stderr;
+
+ var endFn = doEnd ? onend : cleanup;
+ if (state.endEmitted)
+ processNextTick(endFn);
+ else
+ src.once('end', endFn);
+
+ dest.on('unpipe', onunpipe);
+ function onunpipe(readable) {
+ debug('onunpipe');
+ if (readable === src) {
+ cleanup();
+ }
+ }
+
+ function onend() {
+ debug('onend');
+ dest.end();
+ }
+
+ // when the dest drains, it reduces the awaitDrain counter
+ // on the source. This would be more elegant with a .once()
+ // handler in flow(), but adding and removing repeatedly is
+ // too slow.
+ var ondrain = pipeOnDrain(src);
+ dest.on('drain', ondrain);
+
+ function cleanup() {
+ debug('cleanup');
+ // cleanup event handlers once the pipe is broken
+ dest.removeListener('close', onclose);
+ dest.removeListener('finish', onfinish);
+ dest.removeListener('drain', ondrain);
+ dest.removeListener('error', onerror);
+ dest.removeListener('unpipe', onunpipe);
+ src.removeListener('end', onend);
+ src.removeListener('end', cleanup);
+ src.removeListener('data', ondata);
+
+ // if the reader is waiting for a drain event from this
+ // specific writer, then it would cause it to never start
+ // flowing again.
+ // So, if this is awaiting a drain, then we just call it now.
+ // If we don't know, then assume that we are waiting for one.
+ if (state.awaitDrain &&
+ (!dest._writableState || dest._writableState.needDrain))
+ ondrain();
+ }
+
+ src.on('data', ondata);
+ function ondata(chunk) {
+ debug('ondata');
+ var ret = dest.write(chunk);
+ if (false === ret) {
+ debug('false write response, pause',
+ src._readableState.awaitDrain);
+ src._readableState.awaitDrain++;
+ src.pause();
+ }
+ }
+
+ // if the dest has an error, then stop piping into it.
+ // however, don't suppress the throwing behavior for this.
+ function onerror(er) {
+ debug('onerror', er);
+ unpipe();
+ dest.removeListener('error', onerror);
+ if (EElistenerCount(dest, 'error') === 0)
+ dest.emit('error', er);
+ }
+ // This is a brutally ugly hack to make sure that our error handler
+ // is attached before any userland ones. NEVER DO THIS.
+ if (!dest._events || !dest._events.error)
+ dest.on('error', onerror);
+ else if (isArray(dest._events.error))
+ dest._events.error.unshift(onerror);
+ else
+ dest._events.error = [onerror, dest._events.error];
+
+
+ // Both close and finish should trigger unpipe, but only once.
+ function onclose() {
+ dest.removeListener('finish', onfinish);
+ unpipe();
+ }
+ dest.once('close', onclose);
+ function onfinish() {
+ debug('onfinish');
+ dest.removeListener('close', onclose);
+ unpipe();
+ }
+ dest.once('finish', onfinish);
+
+ function unpipe() {
+ debug('unpipe');
+ src.unpipe(dest);
+ }
+
+ // tell the dest that it's being piped to
+ dest.emit('pipe', src);
+
+ // start the flow if it hasn't been started already.
+ if (!state.flowing) {
+ debug('pipe resume');
+ src.resume();
+ }
+
+ return dest;
+};
+
+function pipeOnDrain(src) {
+ return function() {
+ var state = src._readableState;
+ debug('pipeOnDrain', state.awaitDrain);
+ if (state.awaitDrain)
+ state.awaitDrain--;
+ if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
+ state.flowing = true;
+ flow(src);
+ }
+ };
+}
+
+
+Readable.prototype.unpipe = function(dest) {
+ var state = this._readableState;
+
+ // if we're not piping anywhere, then do nothing.
+ if (state.pipesCount === 0)
+ return this;
+
+ // just one destination. most common case.
+ if (state.pipesCount === 1) {
+ // passed in one, but it's not the right one.
+ if (dest && dest !== state.pipes)
+ return this;
+
+ if (!dest)
+ dest = state.pipes;
+
+ // got a match.
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+ if (dest)
+ dest.emit('unpipe', this);
+ return this;
+ }
+
+ // slow case. multiple pipe destinations.
+
+ if (!dest) {
+ // remove all.
+ var dests = state.pipes;
+ var len = state.pipesCount;
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+
+ for (var i = 0; i < len; i++)
+ dests[i].emit('unpipe', this);
+ return this;
+ }
+
+ // try to find the right one.
+ var i = indexOf(state.pipes, dest);
+ if (i === -1)
+ return this;
+
+ state.pipes.splice(i, 1);
+ state.pipesCount -= 1;
+ if (state.pipesCount === 1)
+ state.pipes = state.pipes[0];
+
+ dest.emit('unpipe', this);
+
+ return this;
+};
+
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function(ev, fn) {
+ var res = Stream.prototype.on.call(this, ev, fn);
+
+ // If listening to data, and it has not explicitly been paused,
+ // then call resume to start the flow of data on the next tick.
+ if (ev === 'data' && false !== this._readableState.flowing) {
+ this.resume();
+ }
+
+ if (ev === 'readable' && this.readable) {
+ var state = this._readableState;
+ if (!state.readableListening) {
+ state.readableListening = true;
+ state.emittedReadable = false;
+ state.needReadable = true;
+ if (!state.reading) {
+ processNextTick(nReadingNextTick, this);
+ } else if (state.length) {
+ emitReadable(this, state);
+ }
+ }
+ }
+
+ return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
+
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0');
+ self.read(0);
+}
+
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function() {
+ var state = this._readableState;
+ if (!state.flowing) {
+ debug('resume');
+ state.flowing = true;
+ resume(this, state);
+ }
+ return this;
+};
+
+function resume(stream, state) {
+ if (!state.resumeScheduled) {
+ state.resumeScheduled = true;
+ processNextTick(resume_, stream, state);
+ }
+}
+
+function resume_(stream, state) {
+ if (!state.reading) {
+ debug('resume read 0');
+ stream.read(0);
+ }
+
+ state.resumeScheduled = false;
+ stream.emit('resume');
+ flow(stream);
+ if (state.flowing && !state.reading)
+ stream.read(0);
+}
+
+Readable.prototype.pause = function() {
+ debug('call pause flowing=%j', this._readableState.flowing);
+ if (false !== this._readableState.flowing) {
+ debug('pause');
+ this._readableState.flowing = false;
+ this.emit('pause');
+ }
+ return this;
+};
+
+function flow(stream) {
+ var state = stream._readableState;
+ debug('flow', state.flowing);
+ if (state.flowing) {
+ do {
+ var chunk = stream.read();
+ } while (null !== chunk && state.flowing);
+ }
+}
+
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function(stream) {
+ var state = this._readableState;
+ var paused = false;
+
+ var self = this;
+ stream.on('end', function() {
+ debug('wrapped end');
+ if (state.decoder && !state.ended) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length)
+ self.push(chunk);
+ }
+
+ self.push(null);
+ });
+
+ stream.on('data', function(chunk) {
+ debug('wrapped data');
+ if (state.decoder)
+ chunk = state.decoder.write(chunk);
+
+ // don't skip over falsy values in objectMode
+ if (state.objectMode && (chunk === null || chunk === undefined))
+ return;
+ else if (!state.objectMode && (!chunk || !chunk.length))
+ return;
+
+ var ret = self.push(chunk);
+ if (!ret) {
+ paused = true;
+ stream.pause();
+ }
+ });
+
+ // proxy all the other methods.
+ // important when wrapping filters and duplexes.
+ for (var i in stream) {
+ if (this[i] === undefined && typeof stream[i] === 'function') {
+ this[i] = function(method) { return function() {
+ return stream[method].apply(stream, arguments);
+ }; }(i);
+ }
+ }
+
+ // proxy certain important events.
+ var events = ['error', 'close', 'destroy', 'pause', 'resume'];
+ forEach(events, function(ev) {
+ stream.on(ev, self.emit.bind(self, ev));
+ });
+
+ // when we try to consume some more bytes, simply unpause the
+ // underlying stream.
+ self._read = function(n) {
+ debug('wrapped _read', n);
+ if (paused) {
+ paused = false;
+ stream.resume();
+ }
+ };
+
+ return self;
+};
+
+
+// exposed for testing purposes only.
+Readable._fromList = fromList;
+
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+function fromList(n, state) {
+ var list = state.buffer;
+ var length = state.length;
+ var stringMode = !!state.decoder;
+ var objectMode = !!state.objectMode;
+ var ret;
+
+ // nothing in the list, definitely empty.
+ if (list.length === 0)
+ return null;
+
+ if (length === 0)
+ ret = null;
+ else if (objectMode)
+ ret = list.shift();
+ else if (!n || n >= length) {
+ // read it all, truncate the array.
+ if (stringMode)
+ ret = list.join('');
+ else
+ ret = Buffer.concat(list, length);
+ list.length = 0;
+ } else {
+ // read just some of it.
+ if (n < list[0].length) {
+ // just take a part of the first list item.
+ // slice is the same for buffers and strings.
+ var buf = list[0];
+ ret = buf.slice(0, n);
+ list[0] = buf.slice(n);
+ } else if (n === list[0].length) {
+ // first list is a perfect match
+ ret = list.shift();
+ } else {
+ // complex case.
+ // we have enough to cover it, but it spans past the first buffer.
+ if (stringMode)
+ ret = '';
+ else
+ ret = new Buffer(n);
+
+ var c = 0;
+ for (var i = 0, l = list.length; i < l && c < n; i++) {
+ var buf = list[0];
+ var cpy = Math.min(n - c, buf.length);
+
+ if (stringMode)
+ ret += buf.slice(0, cpy);
+ else
+ buf.copy(ret, c, 0, cpy);
+
+ if (cpy < buf.length)
+ list[0] = buf.slice(cpy);
+ else
+ list.shift();
+
+ c += cpy;
+ }
+ }
+ }
+
+ return ret;
+}
+
+function endReadable(stream) {
+ var state = stream._readableState;
+
+ // If we get here before consuming all the bytes, then that is a
+ // bug in node. Should never happen.
+ if (state.length > 0)
+ throw new Error('endReadable called on non-empty stream');
+
+ if (!state.endEmitted) {
+ state.ended = true;
+ processNextTick(endReadableNT, state, stream);
+ }
+}
+
+function endReadableNT(state, stream) {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
+ }
+}
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
+
+function indexOf (xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
+}
diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js
new file mode 100644
index 0000000..3675d18
--- /dev/null
+++ b/lib/_stream_transform.js
@@ -0,0 +1,197 @@
+// a transform stream is a readable/writable stream where you do
+// something with the data. Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored. (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation. For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes. When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up. When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer. When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks. If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk. However,
+// a pathological inflate type of transform can cause excessive buffering
+// here. For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output. In this case, you could write a very small
+// amount of input, and end up with a very large amount of output. In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform. A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
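+//
+// As a concrete sketch (editor's illustration, not upstream code), a
+// minimal Transform subclass that upper-cases text; the name `Upper`
+// is hypothetical:
+//
+//   var Transform = require('./_stream_transform');
+//   var inherits = require('inherits');
+//   function Upper(options) {
+//     if (!(this instanceof Upper)) return new Upper(options);
+//     Transform.call(this, options);
+//   }
+//   inherits(Upper, Transform);
+//   Upper.prototype._transform = function(chunk, encoding, cb) {
+//     // push the transformed data and signal this chunk is consumed
+//     cb(null, chunk.toString().toUpperCase());
+//   };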
+
+'use strict';
+
+module.exports = Transform;
+
+var Duplex = require('./_stream_duplex');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(Transform, Duplex);
+
+
+function TransformState(stream) {
+ this.afterTransform = function(er, data) {
+ return afterTransform(stream, er, data);
+ };
+
+ this.needTransform = false;
+ this.transforming = false;
+ this.writecb = null;
+ this.writechunk = null;
+}
+
+function afterTransform(stream, er, data) {
+ var ts = stream._transformState;
+ ts.transforming = false;
+
+ var cb = ts.writecb;
+
+ if (!cb)
+ return stream.emit('error', new Error('no writecb in Transform class'));
+
+ ts.writechunk = null;
+ ts.writecb = null;
+
+ if (data !== null && data !== undefined)
+ stream.push(data);
+
+ if (cb)
+ cb(er);
+
+ var rs = stream._readableState;
+ rs.reading = false;
+ if (rs.needReadable || rs.length < rs.highWaterMark) {
+ stream._read(rs.highWaterMark);
+ }
+}
+
+
+function Transform(options) {
+ if (!(this instanceof Transform))
+ return new Transform(options);
+
+ Duplex.call(this, options);
+
+ this._transformState = new TransformState(this);
+
+ // when the writable side finishes, then flush out anything remaining.
+ var stream = this;
+
+ // start out asking for a readable event once data is transformed.
+ this._readableState.needReadable = true;
+
+ // we have implemented the _read method, and done the other things
+ // that Readable wants before the first _read call, so unset the
+ // sync guard flag.
+ this._readableState.sync = false;
+
+ if (options) {
+ if (typeof options.transform === 'function')
+ this._transform = options.transform;
+
+ if (typeof options.flush === 'function')
+ this._flush = options.flush;
+ }
+
+ this.once('prefinish', function() {
+ if (typeof this._flush === 'function')
+ this._flush(function(er) {
+ done(stream, er);
+ });
+ else
+ done(stream);
+ });
+}
+
+Transform.prototype.push = function(chunk, encoding) {
+ this._transformState.needTransform = false;
+ return Duplex.prototype.push.call(this, chunk, encoding);
+};
+
+// This is the part where you do stuff!
+// override this function in implementation classes.
+// 'chunk' is an input chunk.
+//
+// Call `push(newChunk)` to pass along transformed output
+// to the readable side. You may call 'push' zero or more times.
+//
+// Call `cb(err)` when you are done with this chunk. If you pass
+// an error, then that'll put the hurt on the whole operation. If you
+// never call cb(), then you'll never get another chunk.
+Transform.prototype._transform = function(chunk, encoding, cb) {
+ throw new Error('not implemented');
+};
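+
+// For example (editor's sketch), an override may push any number of
+// chunks before calling cb(); `Dup` is a hypothetical subclass:
+//
+//   Dup.prototype._transform = function(chunk, encoding, cb) {
+//     this.push(chunk); // emit the chunk twice on the readable side
+//     this.push(chunk);
+//     cb();             // done with this input chunk
+//   };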
+
+Transform.prototype._write = function(chunk, encoding, cb) {
+ var ts = this._transformState;
+ ts.writecb = cb;
+ ts.writechunk = chunk;
+ ts.writeencoding = encoding;
+ if (!ts.transforming) {
+ var rs = this._readableState;
+ if (ts.needTransform ||
+ rs.needReadable ||
+ rs.length < rs.highWaterMark)
+ this._read(rs.highWaterMark);
+ }
+};
+
+// Doesn't matter what the args are here.
+// _transform does all the work.
+// The fact that we got here means that the readable side wants more data.
+Transform.prototype._read = function(n) {
+ var ts = this._transformState;
+
+ if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
+ ts.transforming = true;
+ this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
+ } else {
+ // mark that we need a transform, so that any data that comes in
+ // will get processed, now that we've asked for it.
+ ts.needTransform = true;
+ }
+};
+
+
+function done(stream, er) {
+ if (er)
+ return stream.emit('error', er);
+
+ // if there's nothing in the write buffer, then that means
+ // that nothing more will ever be provided
+ var ws = stream._writableState;
+ var ts = stream._transformState;
+
+ if (ws.length)
+ throw new Error('calling transform done when ws.length != 0');
+
+ if (ts.transforming)
+ throw new Error('calling transform done when still transforming');
+
+ return stream.push(null);
+}
diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js
new file mode 100644
index 0000000..091b991
--- /dev/null
+++ b/lib/_stream_writable.js
@@ -0,0 +1,527 @@
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, cb), and it'll handle all
+// the drain event emission and buffering.
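+//
+// A minimal consumer sketch (editor's illustration, not upstream code);
+// `sink` is a hypothetical async destination:
+//
+//   var Writable = require('./_stream_writable');
+//   var w = new Writable({
+//     write: function(chunk, encoding, cb) {
+//       sink(chunk, cb); // call cb once the chunk is fully handled
+//     }
+//   });
+//   w.write('hello');
+//   w.end('world');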
+
+'use strict';
+
+module.exports = Writable;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var Buffer = require('buffer').Buffer;
+/*</replacement>*/
+
+Writable.WritableState = WritableState;
+
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var internalUtil = {
+ deprecate: require('util-deprecate')
+};
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var Stream;
+(function (){try{
+ Stream = require('st' + 'ream');
+}catch(_){}finally{
+ if (!Stream)
+ Stream = require('events').EventEmitter;
+}}())
+/*</replacement>*/
+
+util.inherits(Writable, Stream);
+
+function nop() {}
+
+function WriteReq(chunk, encoding, cb) {
+ this.chunk = chunk;
+ this.encoding = encoding;
+ this.callback = cb;
+ this.next = null;
+}
+
+function WritableState(options, stream) {
+ var Duplex = require('./_stream_duplex');
+
+ options = options || {};
+
+ // object stream flag to indicate whether or not this stream
+ // contains buffers or objects.
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex)
+ this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+ // the point at which write() starts returning false
+ // Note: 0 is a valid value, means that we always return false if
+ // the entire buffer is not flushed immediately on write()
+ var hwm = options.highWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+
+ // cast to ints.
+ this.highWaterMark = ~~this.highWaterMark;
+
+ this.needDrain = false;
+ // at the start of calling end()
+ this.ending = false;
+ // when end() has been called, and returned
+ this.ended = false;
+ // when 'finish' is emitted
+ this.finished = false;
+
+ // should we decode strings into buffers before passing to _write?
+ // this is here so that some node-core streams can optimize string
+ // handling at a lower level.
+ var noDecode = options.decodeStrings === false;
+ this.decodeStrings = !noDecode;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // not an actual buffer we keep track of, but a measurement
+ // of how much we're waiting to get pushed to some underlying
+ // socket or file.
+ this.length = 0;
+
+ // a flag to see when we're in the middle of a write.
+ this.writing = false;
+
+ // when true all writes will be buffered until .uncork() call
+ this.corked = 0;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, because any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // a flag to know if we're processing previously buffered items, which
+ // may call the _write() callback in the same tick, so that we don't
+ // end up in an overlapped onwrite situation.
+ this.bufferProcessing = false;
+
+ // the callback that's passed to _write(chunk,cb)
+ this.onwrite = function(er) {
+ onwrite(stream, er);
+ };
+
+ // the callback that the user supplies to write(chunk,encoding,cb)
+ this.writecb = null;
+
+ // the amount that is being written when _write is called.
+ this.writelen = 0;
+
+ this.bufferedRequest = null;
+ this.lastBufferedRequest = null;
+
+ // number of pending user-supplied write callbacks
+ // this must be 0 before 'finish' can be emitted
+ this.pendingcb = 0;
+
+ // emit prefinish if the only thing we're waiting for is _write cbs
+ // This is relevant for synchronous Transform streams
+ this.prefinished = false;
+
+ // True if the error was already emitted and should not be thrown again
+ this.errorEmitted = false;
+}
+
+WritableState.prototype.getBuffer = function writableStateGetBuffer() {
+ var current = this.bufferedRequest;
+ var out = [];
+ while (current) {
+ out.push(current);
+ current = current.next;
+ }
+ return out;
+};
+
+(function (){try {
+Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: internalUtil.deprecate(function() {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' +
+ 'instead.')
+});
+}catch(_){}}());
+
+
+function Writable(options) {
+ var Duplex = require('./_stream_duplex');
+
+ // The Writable ctor is applied to Duplexes as well; although they are
+ // not instanceof Writable, they are instanceof Readable.
+ if (!(this instanceof Writable) && !(this instanceof Duplex))
+ return new Writable(options);
+
+ this._writableState = new WritableState(options, this);
+
+ // legacy.
+ this.writable = true;
+
+ if (options) {
+ if (typeof options.write === 'function')
+ this._write = options.write;
+
+ if (typeof options.writev === 'function')
+ this._writev = options.writev;
+ }
+
+ Stream.call(this);
+}
+
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function() {
+ this.emit('error', new Error('Cannot pipe. Not readable.'));
+};
+
+
+function writeAfterEnd(stream, cb) {
+ var er = new Error('write after end');
+ // TODO: defer error events consistently everywhere, not just the cb
+ stream.emit('error', er);
+ processNextTick(cb, er);
+}
+
+// If we get something that is not a buffer, string, null, or undefined,
+// and we're not in objectMode, then that's an error.
+// Otherwise stream chunks are all considered to be of length=1, and the
+// watermarks determine how many objects to keep in the buffer, rather than
+// how many bytes or characters.
+function validChunk(stream, state, chunk, cb) {
+ var valid = true;
+
+ if (!(Buffer.isBuffer(chunk)) &&
+ typeof chunk !== 'string' &&
+ chunk !== null &&
+ chunk !== undefined &&
+ !state.objectMode) {
+ var er = new TypeError('Invalid non-string/buffer chunk');
+ stream.emit('error', er);
+ processNextTick(cb, er);
+ valid = false;
+ }
+ return valid;
+}
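+
+// Editor's sketch of the objectMode accounting described above: with a
+// highWaterMark of 2, each object counts as length 1, so the second
+// buffered object makes write() return false (illustrative only; the
+// deferred cb keeps the first chunk counted as pending):
+//
+//   var w = new Writable({
+//     objectMode: true,
+//     highWaterMark: 2,
+//     write: function(obj, encoding, cb) { setImmediate(cb); }
+//   });
+//   w.write({ a: 1 }); // true: length 1 < highWaterMark
+//   w.write({ b: 2 }); // false: length reaches the watermark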
+
+Writable.prototype.write = function(chunk, encoding, cb) {
+ var state = this._writableState;
+ var ret = false;
+
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (Buffer.isBuffer(chunk))
+ encoding = 'buffer';
+ else if (!encoding)
+ encoding = state.defaultEncoding;
+
+ if (typeof cb !== 'function')
+ cb = nop;
+
+ if (state.ended)
+ writeAfterEnd(this, cb);
+ else if (validChunk(this, state, chunk, cb)) {
+ state.pendingcb++;
+ ret = writeOrBuffer(this, state, chunk, encoding, cb);
+ }
+
+ return ret;
+};
+
+Writable.prototype.cork = function() {
+ var state = this._writableState;
+
+ state.corked++;
+};
+
+Writable.prototype.uncork = function() {
+ var state = this._writableState;
+
+ if (state.corked) {
+ state.corked--;
+
+ if (!state.writing &&
+ !state.corked &&
+ !state.finished &&
+ !state.bufferProcessing &&
+ state.bufferedRequest)
+ clearBuffer(this, state);
+ }
+};
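+
+// Editor's note: cork()/uncork() are typically paired to batch several
+// small writes into a single _writev() call, e.g. (illustrative only):
+//
+//   w.cork();
+//   w.write('hello, ');
+//   w.write('world');
+//   process.nextTick(function() {
+//     w.uncork(); // flushes both chunks, via _writev when defined
+//   });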
+
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+ // node::ParseEncoding() requires lower case.
+ if (typeof encoding === 'string')
+ encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
+ 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw']
+ .indexOf((encoding + '').toLowerCase()) > -1))
+ throw new TypeError('Unknown encoding: ' + encoding);
+ this._writableState.defaultEncoding = encoding;
+};
+
+function decodeChunk(state, chunk, encoding) {
+ if (!state.objectMode &&
+ state.decodeStrings !== false &&
+ typeof chunk === 'string') {
+ chunk = new Buffer(chunk, encoding);
+ }
+ return chunk;
+}
+
+// if we're already writing something, then just put this
+// in the queue, and wait our turn. Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, chunk, encoding, cb) {
+ chunk = decodeChunk(state, chunk, encoding);
+
+ if (Buffer.isBuffer(chunk))
+ encoding = 'buffer';
+ var len = state.objectMode ? 1 : chunk.length;
+
+ state.length += len;
+
+ var ret = state.length < state.highWaterMark;
+ // we must ensure that previous needDrain will not be reset to false.
+ if (!ret)
+ state.needDrain = true;
+
+ if (state.writing || state.corked) {
+ var last = state.lastBufferedRequest;
+ state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
+ if (last) {
+ last.next = state.lastBufferedRequest;
+ } else {
+ state.bufferedRequest = state.lastBufferedRequest;
+ }
+ } else {
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ }
+
+ return ret;
+}
+
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+ state.writelen = len;
+ state.writecb = cb;
+ state.writing = true;
+ state.sync = true;
+ if (writev)
+ stream._writev(chunk, state.onwrite);
+ else
+ stream._write(chunk, encoding, state.onwrite);
+ state.sync = false;
+}
+
+function onwriteError(stream, state, sync, er, cb) {
+ --state.pendingcb;
+ if (sync)
+ processNextTick(cb, er);
+ else
+ cb(er);
+
+ stream._writableState.errorEmitted = true;
+ stream.emit('error', er);
+}
+
+function onwriteStateUpdate(state) {
+ state.writing = false;
+ state.writecb = null;
+ state.length -= state.writelen;
+ state.writelen = 0;
+}
+
+function onwrite(stream, er) {
+ var state = stream._writableState;
+ var sync = state.sync;
+ var cb = state.writecb;
+
+ onwriteStateUpdate(state);
+
+ if (er)
+ onwriteError(stream, state, sync, er, cb);
+ else {
+ // Check if we're actually ready to finish, but don't emit yet
+ var finished = needFinish(state);
+
+ if (!finished &&
+ !state.corked &&
+ !state.bufferProcessing &&
+ state.bufferedRequest) {
+ clearBuffer(stream, state);
+ }
+
+ if (sync) {
+ processNextTick(afterWrite, stream, state, finished, cb);
+ } else {
+ afterWrite(stream, state, finished, cb);
+ }
+ }
+}
+
+function afterWrite(stream, state, finished, cb) {
+ if (!finished)
+ onwriteDrain(stream, state);
+ state.pendingcb--;
+ cb();
+ finishMaybe(stream, state);
+}
+
+// Must force callback to be called on nextTick, so that we don't
+// emit 'drain' before the write() consumer gets the 'false' return
+// value, and has a chance to attach a 'drain' listener.
+function onwriteDrain(stream, state) {
+ if (state.length === 0 && state.needDrain) {
+ state.needDrain = false;
+ stream.emit('drain');
+ }
+}
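+
+// Editor's sketch of the consumer-side contract this enables: stop
+// writing when write() returns false and resume on 'drain'. more() and
+// source() are hypothetical:
+//
+//   function pump() {
+//     var ok = true;
+//     while (ok && more()) ok = w.write(source());
+//     if (!ok) w.once('drain', pump); // resume once the buffer empties
+//   }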
+
+
+// if there's something in the buffer waiting, then process it
+function clearBuffer(stream, state) {
+ state.bufferProcessing = true;
+ var entry = state.bufferedRequest;
+
+ if (stream._writev && entry && entry.next) {
+ // Fast case, write everything using _writev()
+ var buffer = [];
+ var cbs = [];
+ while (entry) {
+ cbs.push(entry.callback);
+ buffer.push(entry);
+ entry = entry.next;
+ }
+
+ // count the one we are adding, as well.
+ // TODO(isaacs) clean this up
+ state.pendingcb++;
+ state.lastBufferedRequest = null;
+ doWrite(stream, state, true, state.length, buffer, '', function(err) {
+ for (var i = 0; i < cbs.length; i++) {
+ state.pendingcb--;
+ cbs[i](err);
+ }
+ });
+
+ // Clear buffer
+ } else {
+ // Slow case, write chunks one-by-one
+ while (entry) {
+ var chunk = entry.chunk;
+ var encoding = entry.encoding;
+ var cb = entry.callback;
+ var len = state.objectMode ? 1 : chunk.length;
+
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ entry = entry.next;
+ // if we didn't call the onwrite immediately, then
+ // it means that we need to wait until it does.
+ // also, that means that the chunk and cb are currently
+ // being processed, so move the buffer counter past them.
+ if (state.writing) {
+ break;
+ }
+ }
+
+ if (entry === null)
+ state.lastBufferedRequest = null;
+ }
+ state.bufferedRequest = entry;
+ state.bufferProcessing = false;
+}
+
+Writable.prototype._write = function(chunk, encoding, cb) {
+ cb(new Error('not implemented'));
+};
+
+Writable.prototype._writev = null;
+
+Writable.prototype.end = function(chunk, encoding, cb) {
+ var state = this._writableState;
+
+ if (typeof chunk === 'function') {
+ cb = chunk;
+ chunk = null;
+ encoding = null;
+ } else if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (chunk !== null && chunk !== undefined)
+ this.write(chunk, encoding);
+
+ // .end() fully uncorks
+ if (state.corked) {
+ state.corked = 1;
+ this.uncork();
+ }
+
+ // ignore unnecessary end() calls.
+ if (!state.ending && !state.finished)
+ endWritable(this, state, cb);
+};
+
+
+function needFinish(state) {
+ return (state.ending &&
+ state.length === 0 &&
+ state.bufferedRequest === null &&
+ !state.finished &&
+ !state.writing);
+}
+
+function prefinish(stream, state) {
+ if (!state.prefinished) {
+ state.prefinished = true;
+ stream.emit('prefinish');
+ }
+}
+
+function finishMaybe(stream, state) {
+ var need = needFinish(state);
+ if (need) {
+ if (state.pendingcb === 0) {
+ prefinish(stream, state);
+ state.finished = true;
+ stream.emit('finish');
+ } else {
+ prefinish(stream, state);
+ }
+ }
+ return need;
+}
+
+function endWritable(stream, state, cb) {
+ state.ending = true;
+ finishMaybe(stream, state);
+ if (cb) {
+ if (state.finished)
+ processNextTick(cb);
+ else
+ stream.once('finish', cb);
+ }
+ state.ended = true;
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..191b294
--- /dev/null
+++ b/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "readable-stream",
+ "version": "2.0.3",
+ "description": "Streams3, a user-land copy of the stream library from iojs v2.x",
+ "main": "readable.js",
+ "dependencies": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.1",
+ "isarray": "0.0.1",
+ "process-nextick-args": "~1.0.0",
+ "string_decoder": "~0.10.x",
+ "util-deprecate": "~1.0.1"
+ },
+ "devDependencies": {
+ "tap": "~0.2.6",
+ "tape": "~4.0.0",
+ "zuul": "~3.0.0"
+ },
+ "scripts": {
+ "test": "tap test/parallel/*.js",
+ "browser": "npm run write-zuul && zuul -- test/browser.js",
+ "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/nodejs/readable-stream"
+ },
+ "keywords": [
+ "readable",
+ "stream",
+ "pipe"
+ ],
+ "browser": {
+ "util": false
+ },
+ "license": "MIT"
+}
diff --git a/passthrough.js b/passthrough.js
new file mode 100644
index 0000000..27e8d8a
--- /dev/null
+++ b/passthrough.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_passthrough.js")
diff --git a/readable.js b/readable.js
new file mode 100644
index 0000000..6222a57
--- /dev/null
+++ b/readable.js
@@ -0,0 +1,12 @@
+var Stream = (function (){
+ try {
+ return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify
+ } catch(_){}
+}());
+exports = module.exports = require('./lib/_stream_readable.js');
+exports.Stream = Stream || exports;
+exports.Readable = exports;
+exports.Writable = require('./lib/_stream_writable.js');
+exports.Duplex = require('./lib/_stream_duplex.js');
+exports.Transform = require('./lib/_stream_transform.js');
+exports.PassThrough = require('./lib/_stream_passthrough.js');
diff --git a/test/browser.js b/test/browser.js
new file mode 100644
index 0000000..6f228bf
--- /dev/null
+++ b/test/browser.js
@@ -0,0 +1,62 @@
+if (!global.console) {
+ global.console = {};
+}
+if (!global.console.log) {
+ global.console.log = function () {};
+}
+if (!global.console.error) {
+ global.console.error = global.console.log;
+}
+if (!global.console.info) {
+ global.console.info = global.console.log;
+}
+var test = require('tape');
+
+test('streams', function (t) {
+ require('./browser/test-stream-big-packet')(t);
+ require('./browser/test-stream-big-push')(t);
+ require('./browser/test-stream-duplex')(t);
+ require('./browser/test-stream-end-paused')(t);
+ require('./browser/test-stream-ispaused')(t);
+ require('./browser/test-stream-pipe-after-end')(t);
+ require('./browser/test-stream-pipe-cleanup')(t);
+ require('./browser/test-stream-pipe-error-handling')(t);
+ require('./browser/test-stream-pipe-event')(t);
+ require('./browser/test-stream-push-order')(t);
+ require('./browser/test-stream-push-strings')(t);
+ require('./browser/test-stream-readable-constructor-set-methods')(t);
+ require('./browser/test-stream-readable-event')(t);
+ require('./browser/test-stream-transform-constructor-set-methods')(t);
+ require('./browser/test-stream-transform-objectmode-falsey-value')(t);
+ require('./browser/test-stream-transform-split-objectmode')(t);
+ require('./browser/test-stream-unshift-empty-chunk')(t);
+ require('./browser/test-stream-unshift-read-race')(t);
+ require('./browser/test-stream-writable-change-default-encoding')(t);
+ require('./browser/test-stream-writable-constructor-set-methods')(t);
+ require('./browser/test-stream-writable-decoded-encoding')(t);
+ require('./browser/test-stream-writev')(t);
+ require('./browser/test-stream-pipe-without-listenerCount')(t);
+});
+
+test('streams 2', function (t) {
+ require('./browser/test-stream2-base64-single-char-read-end')(t);
+ require('./browser/test-stream2-compatibility')(t);
+ require('./browser/test-stream2-large-read-stall')(t);
+ require('./browser/test-stream2-objects')(t);
+ require('./browser/test-stream2-pipe-error-handling')(t);
+ require('./browser/test-stream2-pipe-error-once-listener')(t);
+ require('./browser/test-stream2-push')(t);
+ require('./browser/test-stream2-readable-empty-buffer-no-eof')(t);
+ require('./browser/test-stream2-readable-from-list')(t);
+ require('./browser/test-stream2-transform')(t);
+ require('./browser/test-stream2-set-encoding')(t);
+ require('./browser/test-stream2-readable-legacy-drain')(t);
+ require('./browser/test-stream2-readable-wrap-empty')(t);
+ require('./browser/test-stream2-readable-non-empty-end')(t);
+ require('./browser/test-stream2-readable-wrap')(t);
+ require('./browser/test-stream2-unpipe-drain')(t);
+ require('./browser/test-stream2-writable')(t);
+});
+test('streams 3', function (t) {
+ require('./browser/test-stream3-pause-then-read')(t);
+});
diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js
new file mode 100644
index 0000000..8670e02
--- /dev/null
+++ b/test/browser/test-stream-big-packet.js
@@ -0,0 +1,62 @@
+'use strict';
+var common = require('../common');
+var inherits = require('inherits');
+var stream = require('../../');
+
+module.exports = function (t) {
+ t.test('big packet', function (t) {
+ t.plan(3);
+ var passed = false;
+
+ function PassThrough() {
+ stream.Transform.call(this);
+ }
+ inherits(PassThrough, stream.Transform);
+ PassThrough.prototype._transform = function(chunk, encoding, done) {
+ this.push(chunk);
+ done();
+ };
+
+ function TestStream() {
+ stream.Transform.call(this);
+ }
+ inherits(TestStream, stream.Transform);
+ TestStream.prototype._transform = function(chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = indexOf(chunk.toString(), 'a') >= 0;
+ }
+ if (passed) {
+ t.ok(passed);
+ }
+ done();
+ };
+
+ var s1 = new PassThrough();
+ var s2 = new PassThrough();
+ var s3 = new TestStream();
+ s1.pipe(s3);
+ // Don't let s2 auto close which may close s3
+ s2.pipe(s3, {end: false});
+
+ // We must write a buffer larger than highWaterMark
+ var big = new Buffer(s1._writableState.highWaterMark + 1);
+ big.fill('x');
+
+ // Since big is larger than highWaterMark, it will be buffered internally.
+ t.ok(!s1.write(big));
+ // 'tiny' is small enough to pass through internal buffer.
+ t.ok(s2.write('tiny'));
+
+ // Write some small data on the next IO loop; it will never be written to
+ // s3 because the 'drain' event is not emitted from s1 and s1 is still paused.
+ setImmediate(s1.write.bind(s1), 'later');
+
+ function indexOf (xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
+ }
+ });
+}
diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js
new file mode 100644
index 0000000..7403e16
--- /dev/null
+++ b/test/browser/test-stream-big-push.js
@@ -0,0 +1,68 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+module.exports = function (t) {
+ t.test('big push', function (t) {
+
+ var str = 'asdfasdfasdfasdfasdf';
+
+ var r = new stream.Readable({
+ highWaterMark: 5,
+ encoding: 'utf8'
+ });
+
+ var reads = 0;
+ var eofed = false;
+ var ended = false;
+
+ r._read = function(n) {
+ if (reads === 0) {
+ setTimeout(function() {
+ r.push(str);
+ });
+ reads++;
+ } else if (reads === 1) {
+ var ret = r.push(str);
+ t.equal(ret, false);
+ reads++;
+ } else {
+ t.notOk(eofed);
+ eofed = true;
+ r.push(null);
+ }
+ };
+
+ r.on('end', function() {
+ ended = true;
+ });
+
+ // push some data in to start.
+ // we've never gotten any read event at this point.
+ var ret = r.push(str);
+ // should be false. > hwm
+ t.notOk(ret);
+ var chunk = r.read();
+ t.equal(chunk, str);
+ chunk = r.read();
+ t.equal(chunk, null);
+
+ r.once('readable', function() {
+ // this time, we'll get *all* the remaining data, because
+ // it's been added synchronously, as the read WOULD take
+ // us below the hwm, and so it triggered a _read() again,
+ // which synchronously added more, which we then return.
+ chunk = r.read();
+ t.equal(chunk, str + str);
+
+ chunk = r.read();
+ t.equal(chunk, null);
+ });
+
+ r.on('end', function() {
+ t.ok(eofed);
+ t.ok(ended);
+ t.equal(reads, 2);
+ t.end();
+ });
+ });
+}
diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js
new file mode 100644
index 0000000..9bfd6af
--- /dev/null
+++ b/test/browser/test-stream-duplex.js
@@ -0,0 +1,35 @@
+'use strict';
+var common = require('../common');
+
+var Duplex = require('../../').Transform;
+
+var stream = new Duplex({ objectMode: true });
+module.exports = function (t) {
+ t.test('duplex', function (t) {
+ t.plan(4);
+ t.ok(stream._readableState.objectMode);
+ t.ok(stream._writableState.objectMode);
+
+ var written;
+ var read;
+
+ stream._write = function(obj, _, cb) {
+ written = obj;
+ cb();
+ };
+
+ stream._read = function() {};
+
+ stream.on('data', function(obj) {
+ read = obj;
+ });
+
+ stream.push({ val: 1 });
+ stream.end({ val: 2 });
+
+ stream.on('end', function() {
+ t.equal(read.val, 1);
+ t.equal(written.val, 2);
+ });
+ });
+}
diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js
new file mode 100644
index 0000000..ff56dd8
--- /dev/null
+++ b/test/browser/test-stream-end-paused.js
@@ -0,0 +1,32 @@
+'use strict';
+var common = require('../common');
+
+
+// Make sure we don't miss the end event for paused 0-length streams
+
+var Readable = require('../../').Readable;
+var stream = new Readable();
+module.exports = function (t) {
+ t.test('end pause', function (t) {
+ t.plan(2);
+ var calledRead = false;
+ stream._read = function() {
+ t.notOk(calledRead);
+ calledRead = true;
+ this.push(null);
+ };
+
+ stream.on('data', function() {
+ throw new Error('should not ever get data');
+ });
+ stream.pause();
+
+ setTimeout(function() {
+ stream.on('end', function() {
+ t.ok(calledRead);
+ });
+ stream.resume();
+ });
+
+ });
+}
diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js
new file mode 100644
index 0000000..d080f41
--- /dev/null
+++ b/test/browser/test-stream-ispaused.js
@@ -0,0 +1,27 @@
+'use strict';
+var common = require('../common');
+
+var stream = require('../../');
+module.exports = function (t) {
+ t.test('is paused', function (t) {
+ var readable = new stream.Readable();
+
+ // _read is a noop, here.
+ readable._read = Function();
+
+ // default state of a stream is not "paused"
+ t.notOk(readable.isPaused());
+
+ // make the stream start flowing...
+ readable.on('data', Function());
+
+ // still not paused.
+ t.notOk(readable.isPaused());
+
+ readable.pause();
+ t.ok(readable.isPaused());
+ readable.resume();
+ t.notOk(readable.isPaused());
+ t.end();
+ });
+}
diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js
new file mode 100644
index 0000000..0ca97b3
--- /dev/null
+++ b/test/browser/test-stream-pipe-after-end.js
@@ -0,0 +1,64 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
+var inherits = require('inherits');
+module.exports = function (t) {
+ t.test('pipe after end', function (t) {
+ t.plan(4);
+ inherits(TestReadable, Readable);
+ function TestReadable(opt) {
+ if (!(this instanceof TestReadable))
+ return new TestReadable(opt);
+ Readable.call(this, opt);
+ this._ended = false;
+ }
+
+ TestReadable.prototype._read = function(n) {
+ if (this._ended)
+ this.emit('error', new Error('_read called twice'));
+ this._ended = true;
+ this.push(null);
+ };
+
+ inherits(TestWritable, Writable);
+ function TestWritable(opt) {
+ if (!(this instanceof TestWritable))
+ return new TestWritable(opt);
+ Writable.call(this, opt);
+ this._written = [];
+ }
+
+ TestWritable.prototype._write = function(chunk, encoding, cb) {
+ this._written.push(chunk);
+ cb();
+ };
+
+ // this one should not emit 'end' until we read() from it later.
+ var ender = new TestReadable();
+ var enderEnded = false;
+
+ // what happens when you pipe() a Readable that's already ended?
+ var piper = new TestReadable();
+ // pushes EOF null, and length=0, so this will trigger 'end'
+ piper.read();
+
+ setTimeout(function() {
+ ender.on('end', function() {
+ enderEnded = true;
+ t.ok(true, 'enderEnded');
+ });
+ t.notOk(enderEnded);
+ var c = ender.read();
+ t.equal(c, null);
+
+ var w = new TestWritable();
+ w.on('finish', function() {
+ t.ok(true, 'writableFinished');
+ });
+ piper.pipe(w);
+
+ });
+ });
+}
diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js
new file mode 100644
index 0000000..dd2b6d5
--- /dev/null
+++ b/test/browser/test-stream-pipe-cleanup.js
@@ -0,0 +1,108 @@
+'use strict';
+// This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
+
+var common = require('../common');
+var stream = require('../../');
+var inherits = require('inherits');
+module.exports = function (t) {
+ t.test('pipe cleanup', function (t) {
+ if (/^v0\.8\./.test(process.version))
+ return t.end();
+
+ function Writable() {
+ this.writable = true;
+ this.endCalls = 0;
+ require('stream').Stream.call(this);
+ }
+ inherits(Writable, require('stream').Stream);
+ Writable.prototype.end = function() {
+ this.endCalls++;
+ };
+
+ Writable.prototype.destroy = function() {
+ this.endCalls++;
+ };
+
+ function Readable() {
+ this.readable = true;
+ require('stream').Stream.call(this);
+ }
+ inherits(Readable, require('stream').Stream);
+
+ function Duplex() {
+ this.readable = true;
+ Writable.call(this);
+ }
+ inherits(Duplex, Writable);
+
+ var i = 0;
+ var limit = 100;
+
+ var w = new Writable();
+
+ var r;
+
+ for (i = 0; i < limit; i++) {
+ r = new Readable();
+ r.pipe(w);
+ r.emit('end');
+ }
+ t.equal(0, r.listeners('end').length);
+ t.equal(limit, w.endCalls);
+
+ w.endCalls = 0;
+
+ for (i = 0; i < limit; i++) {
+ r = new Readable();
+ r.pipe(w);
+ r.emit('close');
+ }
+ t.equal(0, r.listeners('close').length);
+ t.equal(limit, w.endCalls);
+
+ w.endCalls = 0;
+
+ r = new Readable();
+
+ for (i = 0; i < limit; i++) {
+ w = new Writable();
+ r.pipe(w);
+ w.emit('close');
+ }
+ t.equal(0, w.listeners('close').length);
+
+ r = new Readable();
+ w = new Writable();
+ var d = new Duplex();
+ r.pipe(d); // pipeline A
+ d.pipe(w); // pipeline B
+ t.equal(r.listeners('end').length, 2); // A.onend, A.cleanup
+ t.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup
+ t.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
+ t.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup
+ t.equal(w.listeners('end').length, 0);
+ t.equal(w.listeners('close').length, 1); // B.cleanup
+
+ r.emit('end');
+ t.equal(d.endCalls, 1);
+ t.equal(w.endCalls, 0);
+ t.equal(r.listeners('end').length, 0);
+ t.equal(r.listeners('close').length, 0);
+ t.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
+ t.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup
+ t.equal(w.listeners('end').length, 0);
+ t.equal(w.listeners('close').length, 1); // B.cleanup
+
+ d.emit('end');
+ t.equal(d.endCalls, 1);
+ t.equal(w.endCalls, 1);
+ t.equal(r.listeners('end').length, 0);
+ t.equal(r.listeners('close').length, 0);
+ t.equal(d.listeners('end').length, 0);
+ t.equal(d.listeners('close').length, 0);
+ t.equal(w.listeners('end').length, 0);
+ t.equal(w.listeners('close').length, 0);
+ t.end();
+ });
+}
diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js
new file mode 100644
index 0000000..48a8bb3
--- /dev/null
+++ b/test/browser/test-stream-pipe-error-handling.js
@@ -0,0 +1,102 @@
+'use strict';
+var common = require('../common');
+var Stream = require('stream').Stream;
+
+module.exports = function (t) {
+ t.test('Error Listener Catches', function (t) {
+ t.plan(1);
+ var source = new Stream();
+ var dest = new Stream();
+
+ source.pipe(dest);
+
+ var gotErr = null;
+ source.on('error', function(err) {
+ gotErr = err;
+ });
+
+ var err = new Error('This stream turned into bacon.');
+ source.emit('error', err);
+ t.strictEqual(gotErr, err);
+ });
+
+ t.test('Error WithoutListener Throws', function (t) {
+ t.plan(1);
+ var source = new Stream();
+ var dest = new Stream();
+
+ source.pipe(dest);
+
+ var err = new Error('This stream turned into bacon.');
+
+ var gotErr = null;
+ try {
+ source.emit('error', err);
+ } catch (e) {
+ gotErr = e;
+ }
+
+ t.strictEqual(gotErr, err);
+ });
+
+ t.test('Error With Removed Listener Throws', function (t) {
+ t.plan(2);
+ var EE = require('events').EventEmitter;
+ var R = require('../../').Readable;
+ var W = require('../../').Writable;
+
+ var r = new R();
+ var w = new W();
+ var removed = false;
+
+ r._read = function() {
+ setTimeout(function() {
+ t.ok(removed);
+ t.throws(function() {
+ w.emit('error', new Error('fail'));
+ });
+ });
+ };
+
+ w.on('error', myOnError);
+ r.pipe(w);
+ w.removeListener('error', myOnError);
+ removed = true;
+
+ function myOnError(er) {
+ throw new Error('this should not happen');
+ }
+ });
+
+ t.test('Error With Removed Listener Throws', function (t) {
+ t.plan(2);
+ var EE = require('events').EventEmitter;
+ var R = require('../../').Readable;
+ var W = require('../../').Writable;
+
+ var r = new R();
+ var w = new W();
+ var removed = false;
+ var caught = false;
+
+ r._read = function() {
+ setTimeout(function() {
+ t.ok(removed);
+ w.emit('error', new Error('fail'));
+ });
+ };
+
+ w.on('error', myOnError);
+ w._write = function() {};
+
+ r.pipe(w);
+ // Removing some OTHER random listener should not do anything
+ w.removeListener('error', function() {});
+ removed = true;
+
+ function myOnError(er) {
+ t.notOk(caught);
+ caught = true;
+ }
+ });
+}
diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js
new file mode 100644
index 0000000..c0d7a60
--- /dev/null
+++ b/test/browser/test-stream-pipe-event.js
@@ -0,0 +1,32 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+var inherits = require('inherits');
+module.exports = function (t) {
+ t.test('pipe event', function (t) {
+ t.plan(1);
+ function Writable() {
+ this.writable = true;
+ require('stream').Stream.call(this);
+ }
+ inherits(Writable, require('stream').Stream);
+
+ function Readable() {
+ this.readable = true;
+ require('stream').Stream.call(this);
+ }
+ inherits(Readable, require('stream').Stream);
+
+ var passed = false;
+
+ var w = new Writable();
+ w.on('pipe', function(src) {
+ passed = true;
+ });
+
+ var r = new Readable();
+ r.pipe(w);
+
+ t.ok(passed);
+ });
+}
diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js
new file mode 100644
index 0000000..742e2d2
--- /dev/null
+++ b/test/browser/test-stream-pipe-without-listenerCount.js
@@ -0,0 +1,27 @@
+'use strict';
+var Stream = require('../../');
+
+module.exports = function (t) {
+ t.test('pipe without listenerCount', function (t) {
+ t.plan(2);
+ var r = new Stream({
+ read: function (){}});
+ r.listenerCount = undefined;
+
+ var w = new Stream();
+ w.listenerCount = undefined;
+
+ w.on('pipe', function() {
+ r.emit('error', new Error('Readable Error'));
+ w.emit('error', new Error('Writable Error'));
+ });
+ r.on('error', function (e) {
+ t.ok(e, 'readable error');
+ });
+ w.on('error', function (e) {
+ t.ok(e, 'writable error');
+ });
+ r.pipe(w);
+
+ });
+}
diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js
new file mode 100644
index 0000000..acffd41
--- /dev/null
+++ b/test/browser/test-stream-push-order.js
@@ -0,0 +1,34 @@
+'use strict';
+var common = require('../common');
+var Readable = require('../../').Readable;
+module.exports = function (t) {
+ t.test('push order', function (t) {
+ t.plan(1);
+ var s = new Readable({
+ highWaterMark: 20,
+ encoding: 'ascii'
+ });
+
+ var list = ['1', '2', '3', '4', '5', '6'];
+
+ s._read = function(n) {
+ var one = list.shift();
+ if (!one) {
+ s.push(null);
+ } else {
+ var two = list.shift();
+ s.push(one);
+ s.push(two);
+ }
+ };
+
+ var v = s.read(0);
+
+ // ACTUALLY [1, 3, 5, 6, 4, 2]
+
+ setTimeout(function() {
+ t.deepEqual(s._readableState.buffer,
+ ['1', '2', '3', '4', '5', '6']);
+ });
+ });
+}
diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js
new file mode 100644
index 0000000..1de240e
--- /dev/null
+++ b/test/browser/test-stream-push-strings.js
@@ -0,0 +1,49 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../').Readable;
+var inherits = require('inherits');
+
+module.exports = function (t) {
+ t.test('push strings', function (t) {
+ t.plan(2);
+ inherits(MyStream, Readable);
+ function MyStream(options) {
+ Readable.call(this, options);
+ this._chunks = 3;
+ }
+
+ MyStream.prototype._read = function(n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null);
+ case 1:
+ return setTimeout(function() {
+ this.push('last chunk');
+ }.bind(this), 100);
+ case 2:
+ return this.push('second to last chunk');
+ case 3:
+ return process.nextTick(function() {
+ this.push('first chunk');
+ }.bind(this));
+ default:
+ throw new Error('?');
+ }
+ };
+ var expect = [ 'first chunksecond to last chunk', 'last chunk' ];
+
+ var ms = new MyStream();
+ var results = [];
+ ms.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = ms.read()))
+ results.push(chunk + '');
+ });
+
+ ms.on('end', function() {
+ t.equal(ms._chunks, -1);
+ t.deepEqual(results, expect);
+ });
+ });
+}
diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js
new file mode 100644
index 0000000..fa0d59b
--- /dev/null
+++ b/test/browser/test-stream-readable-constructor-set-methods.js
@@ -0,0 +1,22 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../').Readable;
+module.exports = function (t) {
+ t.test('readable constructor set methods', function (t) {
+ t.plan(2);
+ var _readCalled = false;
+ function _read(n) {
+ _readCalled = true;
+ this.push(null);
+ }
+
+ var r = new Readable({ read: _read });
+ r.resume();
+
+ setTimeout(function() {
+ t.equal(r._read, _read);
+ t.ok(_readCalled);
+ });
+ });
+}
diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js
new file mode 100644
index 0000000..6afabc3
--- /dev/null
+++ b/test/browser/test-stream-readable-event.js
@@ -0,0 +1,114 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../').Readable;
+
+function first(t) {
+ // First test, not reading when the readable is added.
+ // make sure that on('readable', ...) triggers a readable event.
+ var r = new Readable({
+ highWaterMark: 3
+ });
+
+ var _readCalled = false;
+ r._read = function(n) {
+ _readCalled = true;
+ };
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(new Buffer('blerg'));
+
+ var caughtReadable = false;
+ setTimeout(function() {
+ // we're testing what we think we are
+ t.notOk(r._readableState.reading);
+ r.on('readable', function() {
+ caughtReadable = true;
+ setTimeout(function() {
+ // we're testing what we think we are
+ t.notOk(_readCalled);
+
+ t.ok(caughtReadable);
+ t.end();
+ });
+ });
+ });
+
+
+}
+
+function second(t) {
+ // second test, make sure that readable is re-emitted if there's
+ // already a length, while it IS reading.
+
+ var r = new Readable({
+ highWaterMark: 3
+ });
+
+ var _readCalled = false;
+ r._read = function(n) {
+ _readCalled = true;
+ };
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(new Buffer('bl'));
+
+ var caughtReadable = false;
+ setTimeout(function() {
+ // assert we're testing what we think we are
+ t.ok(r._readableState.reading);
+ r.on('readable', function() {
+ caughtReadable = true;
+ setTimeout(function() {
+ // we're testing what we think we are
+ t.ok(_readCalled);
+
+ t.ok(caughtReadable);
+ t.end();
+ });
+ });
+ });
+
+}
+
+function third(t) {
+ // Third test, not reading when the stream has not passed
+ // the highWaterMark but *has* reached EOF.
+ var r = new Readable({
+ highWaterMark: 30
+ });
+
+ var _readCalled = false;
+ r._read = function(n) {
+ _readCalled = true;
+ };
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(new Buffer('blerg'));
+ r.push(null);
+
+ var caughtReadable = false;
+ setTimeout(function() {
+ // assert we're testing what we think we are
+ t.notOk(r._readableState.reading);
+ r.on('readable', function() {
+ caughtReadable = true;
+ setTimeout(function() {
+ // we're testing what we think we are
+ t.notOk(_readCalled);
+
+ t.ok(caughtReadable);
+ t.end();
+ });
+ });
+ });
+
+}
+
+module.exports = function (t) {
+ t.test('readable events', function (t) {
+ t.test('first', first);
+ t.test('second', second);
+ t.test('third', third);
+ });
+}
diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js
new file mode 100644
index 0000000..de89057
--- /dev/null
+++ b/test/browser/test-stream-transform-constructor-set-methods.js
@@ -0,0 +1,35 @@
+'use strict';
+var common = require('../common');
+
+var Transform = require('../../').Transform;
+module.exports = function (t) {
+ t.test('transform constructor set methods', function (t) {
+ var _transformCalled = false;
+ function _transform(d, e, n) {
+ _transformCalled = true;
+ n();
+ }
+
+ var _flushCalled = false;
+ function _flush(n) {
+ _flushCalled = true;
+ n();
+ }
+
+ var tr = new Transform({
+ transform: _transform,
+ flush: _flush
+ });
+
+ tr.end(new Buffer('blerg'));
+ tr.resume();
+
+ tr.on('end', function() {
+ t.equal(tr._transform, _transform);
+ t.equal(tr._flush, _flush);
+ t.ok(_transformCalled);
+ t.ok(_flushCalled);
+ t.end();
+ });
+ });
+}
diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js
new file mode 100644
index 0000000..3b226a7
--- /dev/null
+++ b/test/browser/test-stream-transform-objectmode-falsey-value.js
@@ -0,0 +1,36 @@
+'use strict';
+var common = require('../common');
+
+var stream = require('../../');
+var PassThrough = stream.PassThrough;
+module.exports = function (t) {
+ t.test('transform objectmode falsey value', function (t) {
+ var src = new PassThrough({ objectMode: true });
+ var tx = new PassThrough({ objectMode: true });
+ var dest = new PassThrough({ objectMode: true });
+
+ var expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ];
+ var results = [];
+ dest.on('end', function() {
+ t.deepEqual(results, expect);
+ t.end();
+ });
+
+ dest.on('data', function(x) {
+ results.push(x);
+ });
+
+ src.pipe(tx).pipe(dest);
+
+ var i = -1;
+ var int = setInterval(function() {
+ if (i > 10) {
+ src.end();
+ clearInterval(int);
+ } else {
+ t.ok(true);
+ src.write(i++);
+ }
+ }, 10);
+ });
+}
diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js
new file mode 100644
index 0000000..3813499
--- /dev/null
+++ b/test/browser/test-stream-transform-split-objectmode.js
@@ -0,0 +1,58 @@
+'use strict';
+var common = require('../common');
+
+var Transform = require('../../').Transform;
+module.exports = function (t) {
+ t.test('transform split objectmode', function (t) {
+ t.plan(10);
+ var parser = new Transform({ readableObjectMode : true });
+
+ t.ok(parser._readableState.objectMode, 'parser 1');
+ t.notOk(parser._writableState.objectMode, 'parser 2');
+ t.equals(parser._readableState.highWaterMark, 16, 'parser 3');
+ t.equals(parser._writableState.highWaterMark, (16 * 1024), 'parser 4');
+
+ parser._transform = function(chunk, enc, callback) {
+ callback(null, { val : chunk[0] });
+ };
+
+ var parsed;
+
+ parser.on('data', function(obj) {
+ parsed = obj;
+ });
+
+ parser.end(new Buffer([42]));
+
+ parser.on('end', function() {
+ t.equals(parsed.val, 42, 'parser ended');
+ });
+
+
+ var serializer = new Transform({ writableObjectMode : true });
+
+ t.notOk(serializer._readableState.objectMode, 'serializer 1');
+ t.ok(serializer._writableState.objectMode, 'serializer 2');
+ t.equals(serializer._readableState.highWaterMark, (16 * 1024), 'serializer 3');
+ t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4');
+
+ serializer._transform = function(obj, _, callback) {
+ callback(null, new Buffer([obj.val]));
+ };
+
+ var serialized;
+
+ serializer.on('data', function(chunk) {
+ serialized = chunk;
+ });
+
+ serializer.write({ val : 42 });
+
+ serializer.on('end', function() {
+ t.equals(serialized[0], 42, 'serializer ended');
+ });
+ setImmediate(function () {
+ serializer.end();
+ });
+ });
+}
diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js
new file mode 100644
index 0000000..ddeb170
--- /dev/null
+++ b/test/browser/test-stream-unshift-empty-chunk.js
@@ -0,0 +1,63 @@
+'use strict';
+var common = require('../common');
+
+// This test verifies that stream.unshift(Buffer(0)) or
+// stream.unshift('') does not set state.reading=false.
+var Readable = require('../../').Readable;
+module.exports = function (t) {
+ t.test('unshift empty chunk', function (t) {
+ t.plan(1);
+ var r = new Readable();
+ var nChunks = 10;
+ var chunk = new Buffer(10);
+ chunk.fill('x');
+
+ r._read = function(n) {
+ setTimeout(function() {
+ r.push(--nChunks === 0 ? null : chunk);
+ });
+ };
+
+ var readAll = false;
+ var seen = [];
+ r.on('readable', function() {
+ var chunk;
+ while (chunk = r.read()) {
+ seen.push(chunk.toString());
+ // simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+ var putBack = new Buffer(readAll ? 0 : 5);
+ putBack.fill('y');
+ readAll = !readAll;
+ r.unshift(putBack);
+ }
+ });
+
+ var expect =
+ [ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy' ];
+
+ r.on('end', function() {
+ t.deepEqual(seen, expect);
+ });
+ });
+}
diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js
new file mode 100644
index 0000000..b2362a7
--- /dev/null
+++ b/test/browser/test-stream-unshift-read-race.js
@@ -0,0 +1,110 @@
+'use strict';
+var common = require('../common');
+
+// This test verifies that:
+// 1. unshift() does not cause colliding _read() calls.
+// 2. unshift() after the 'end' event is an error, but after the EOF
+// signaling null, it is ok, and just creates a new readable chunk.
+// 3. push() after the EOF signaling null is an error.
+// 4. _read() is not called after pushing the EOF null chunk.
+
+var stream = require('../../');
+module.exports = function (t) {
+ t.test('unshift read race', function (tape) {
+ var hwm = 10;
+ var r = stream.Readable({ highWaterMark: hwm });
+ var chunks = 10;
+ var t = (chunks * 5);
+
+ var data = new Buffer(chunks * hwm + Math.ceil(hwm / 2));
+ for (var i = 0; i < data.length; i++) {
+ var c = 'asdf'.charCodeAt(i % 4);
+ data[i] = c;
+ }
+
+ var pos = 0;
+ var pushedNull = false;
+ r._read = function(n) {
+ tape.notOk(pushedNull, '_read after null push');
+
+ // every third chunk is fast
+ push(!(chunks % 3));
+
+ function push(fast) {
+ tape.notOk(pushedNull, 'push() after null push');
+ var c = pos >= data.length ? null : data.slice(pos, Math.min(pos + n, data.length));
+ pushedNull = c === null;
+ if (fast) {
+ pos += n;
+ r.push(c);
+ if (c === null) pushError();
+ } else {
+ setTimeout(function() {
+ pos += n;
+ r.push(c);
+ if (c === null) pushError();
+ });
+ }
+ }
+ };
+
+ function pushError() {
+ tape.throws(function() {
+ r.push(new Buffer(1));
+ });
+ }
+
+
+ var w = stream.Writable();
+ var written = [];
+ w._write = function(chunk, encoding, cb) {
+ written.push(chunk.toString());
+ cb();
+ };
+
+ var ended = false;
+ r.on('end', function() {
+ tape.notOk(ended, 'end emitted more than once');
+ tape.throws(function() {
+ r.unshift(new Buffer(1));
+ });
+ ended = true;
+ w.end();
+ });
+
+ r.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = r.read(10))) {
+ w.write(chunk);
+ if (chunk.length > 4)
+ r.unshift(new Buffer('1234'));
+ }
+ });
+
+ w.on('finish', function() {
+ // each chunk should start with 1234, and then be asdfasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ tape.equal(written[0], 'asdfasdfas');
+ var asdf = 'd';
+ //console.error('0: %s', written[0]);
+ for (var i = 1; i < written.length; i++) {
+ //console.error('%s: %s', i.toString(32), written[i]);
+ tape.equal(written[i].slice(0, 4), '1234');
+ for (var j = 4; j < written[i].length; j++) {
+ var c = written[i].charAt(j);
+ tape.equal(c, asdf);
+ switch (asdf) {
+ case 'a': asdf = 's'; break;
+ case 's': asdf = 'd'; break;
+ case 'd': asdf = 'f'; break;
+ case 'f': asdf = 'a'; break;
+ }
+ }
+ }
+ tape.equal(written.length, 18);
+ tape.end();
+ });
+
+ });
+}
diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js
new file mode 100644
index 0000000..de65715
--- /dev/null
+++ b/test/browser/test-stream-writable-change-default-encoding.js
@@ -0,0 +1,64 @@
+'use strict';
+var common = require('../common');
+
+var stream = require('../../');
+var inherits = require('inherits');
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options);
+ this.fn = fn;
+}
+
+inherits(MyWritable, stream.Writable);
+
+MyWritable.prototype._write = function(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
+ callback();
+};
+
+function defaultEncodingIsUtf8(t) {
+ t.plan(1);
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ t.equal(enc, 'utf8');
+ }, { decodeStrings: false });
+ m.write('foo');
+ m.end();
+}
+
+function changeDefaultEncodingToAscii(t) {
+ t.plan(1);
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ t.equal(enc, 'ascii');
+ }, { decodeStrings: false });
+ m.setDefaultEncoding('ascii');
+ m.write('bar');
+ m.end();
+}
+
+function changeDefaultEncodingToInvalidValue(t) {
+ t.plan(1);
+ t.throws(function () {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ }, { decodeStrings: false });
+ m.setDefaultEncoding({});
+ m.write('bar');
+ m.end();
+ }, TypeError);
+}
+function checkVariableCaseEncoding(t) {
+ t.plan(1);
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ t.equal(enc, 'ascii');
+ }, { decodeStrings: false });
+ m.setDefaultEncoding('AsCii');
+ m.write('bar');
+ m.end();
+}
+module.exports = function (t) {
+ t.test('writable change default encoding', function (t) {
+ t.test('defaultEncodingIsUtf8', defaultEncodingIsUtf8);
+ t.test('changeDefaultEncodingToAscii', changeDefaultEncodingToAscii);
+ t.test('changeDefaultEncodingToInvalidValue', changeDefaultEncodingToInvalidValue);
+ t.test('checkVariableCaseEncoding', checkVariableCaseEncoding);
+ });
+}
diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js
new file mode 100644
index 0000000..25a657e
--- /dev/null
+++ b/test/browser/test-stream-writable-constructor-set-methods.js
@@ -0,0 +1,40 @@
+'use strict';
+var common = require('../common');
+var Writable = require('../../').Writable;
+
+module.exports = function (t) {
+ t.test('writable constructor set methods', function (t){
+
+
+ var _writeCalled = false;
+ function _write(d, e, n) {
+ _writeCalled = true;
+ }
+
+ var w = new Writable({ write: _write });
+ w.end(new Buffer('blerg'));
+
+ var _writevCalled = false;
+ var dLength = 0;
+ function _writev(d, n) {
+ dLength = d.length;
+ _writevCalled = true;
+ }
+
+ var w2 = new Writable({ writev: _writev });
+ w2.cork();
+
+ w2.write(new Buffer('blerg'));
+ w2.write(new Buffer('blerg'));
+ w2.end();
+
+ setImmediate(function() {
+ t.equal(w._write, _write);
+ t.ok(_writeCalled);
+ t.equal(w2._writev, _writev);
+ t.equal(dLength, 2);
+ t.ok(_writevCalled);
+ t.end();
+ });
+ });
+}
diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js
new file mode 100644
index 0000000..f32dd7e
--- /dev/null
+++ b/test/browser/test-stream-writable-decoded-encoding.js
@@ -0,0 +1,45 @@
+'use strict';
+var common = require('../common');
+
+var stream = require('../../');
+var inherits = require('inherits');
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options);
+ this.fn = fn;
+}
+
+inherits(MyWritable, stream.Writable);
+
+MyWritable.prototype._write = function(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
+ callback();
+};
+
+function decodeStringsTrue(t) {
+ t.plan(3);
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ t.ok(isBuffer);
+ t.equal(type, 'object');
+ t.equal(enc, 'buffer');
+ //console.log('ok - decoded string is decoded');
+ }, { decodeStrings: true });
+ m.write('some-text', 'utf8');
+ m.end();
+}
+
+function decodeStringsFalse(t) {
+ t.plan(3);
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ t.notOk(isBuffer);
+ t.equal(type, 'string');
+ t.equal(enc, 'utf8');
+ //console.log('ok - un-decoded string is not decoded');
+ }, { decodeStrings: false });
+ m.write('some-text', 'utf8');
+ m.end();
+}
+module.exports = function (t) {
+ t.test('decodeStringsTrue', decodeStringsTrue);
+ t.test('decodeStringsFalse', decodeStringsFalse);
+}
diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js
new file mode 100644
index 0000000..f676f20
--- /dev/null
+++ b/test/browser/test-stream-writev.js
@@ -0,0 +1,105 @@
+'use strict';
+var common = require('../common');
+
+var stream = require('../../');
+
+var queue = [];
+for (var decode = 0; decode < 2; decode++) {
+ for (var uncork = 0; uncork < 2; uncork++) {
+ for (var multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi]);
+ }
+ }
+}
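+// queue now holds all 8 combinations of the [decode, uncork, multi] flags.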
+
+module.exports = function (t) {
+ t.test('writev', function (t) {
+ queue.forEach(function (tr, i){
+ t.test('round ' + i, test(tr[0], tr[1], tr[2]));
+ });
+ });
+}
+
+function test(decode, uncork, multi) {
+ return function (t) {
+ //console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
+ var counter = 0;
+ var expectCount = 0;
+ function cnt(msg) {
+ expectCount++;
+ var expect = expectCount;
+ var called = false;
+ return function(er) {
+ if (er)
+ throw er;
+ called = true;
+ counter++;
+ t.equal(counter, expect);
+ };
+ }
+
+ var w = new stream.Writable({ decodeStrings: decode });
+ w._write = function(chunk, e, cb) {
+ t.ok(false, 'Should not call _write');
+ };
+
+ var expectChunks = decode ?
+ [
+ { encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32] },
+ { encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100] },
+ { encoding: 'buffer',
+ chunk: [33] },
+ { encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] },
+ { encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]}
+ ] : [
+ { encoding: 'ascii', chunk: 'hello, ' },
+ { encoding: 'utf8', chunk: 'world' },
+ { encoding: 'buffer', chunk: [33] },
+ { encoding: 'binary', chunk: '\nand then...' },
+ { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }
+ ];
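+ // with decodeStrings on, each write arrives as a Buffer: the byte arrays
+ // above are 'hello, ', 'world', '!', '\nand then...' and the hex string
+ // decoded, in that order.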
+
+ var actualChunks;
+ w._writev = function(chunks, cb) {
+ actualChunks = chunks.map(function(chunk) {
+ return {
+ encoding: chunk.encoding,
+ chunk: Buffer.isBuffer(chunk.chunk) ?
+ Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ };
+ });
+ cb();
+ };
+
+ w.cork();
+ w.write('hello, ', 'ascii', cnt('hello'));
+ w.write('world', 'utf8', cnt('world'));
+
+ if (multi)
+ w.cork();
+
+ w.write(new Buffer('!'), 'buffer', cnt('!'));
+ w.write('\nand then...', 'binary', cnt('and then'));
+
+ if (multi)
+ w.uncork();
+
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'));
+
+ if (uncork)
+ w.uncork();
+
+ w.end(cnt('end'));
+
+ w.on('finish', function() {
+ // make sure finish comes after all the write cb
+ cnt('finish')();
+ t.deepEqual(expectChunks, actualChunks);
+ t.end();
+ });
+ }
+}
diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js
new file mode 100644
index 0000000..c68e66d
--- /dev/null
+++ b/test/browser/test-stream2-base64-single-char-read-end.js
@@ -0,0 +1,41 @@
+'use strict';
+var common = require('../common');
+var R = require('../../lib/_stream_readable');
+var W = require('../../lib/_stream_writable');
+module.exports = function (t) {
+ t.test('base64 single char read end', function (t) {
+ t.plan(1);
+ var src = new R({encoding: 'base64'});
+ var dst = new W();
+ var hasRead = false;
+ var accum = [];
+ var timeout;
+
+ src._read = function(n) {
+ if (!hasRead) {
+ hasRead = true;
+ process.nextTick(function() {
+ src.push(new Buffer('1'));
+ src.push(null);
+ });
+ }
+ };
+
+ dst._write = function(chunk, enc, cb) {
+ accum.push(chunk);
+ cb();
+ };
+
+ src.on('end', function() {
+ t.equal(Buffer.concat(accum) + '', 'MQ==');
+ clearTimeout(timeout);
+ });
+
+ src.pipe(dst);
+
+ timeout = setTimeout(function() {
+ t.fail('timed out waiting for _write');
+ }, 100);
+
+})
+}
diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js
new file mode 100644
index 0000000..34961a3
--- /dev/null
+++ b/test/browser/test-stream2-compatibility.js
@@ -0,0 +1,33 @@
+'use strict';
+var R = require('../../lib/_stream_readable');
+var inherits = require('inherits');
+var EE = require('events').EventEmitter;
+module.exports = function (t) {
+ t.test('compatibility', function (t) {
+ t.plan(1);
+
+ var ondataCalled = 0;
+
+ function TestReader() {
+ R.apply(this);
+ this._buffer = new Buffer(100);
+ this._buffer.fill('x');
+
+ this.on('data', function() {
+ ondataCalled++;
+ });
+ }
+
+ inherits(TestReader, R);
+
+ TestReader.prototype._read = function(n) {
+ this.push(this._buffer);
+ this._buffer = new Buffer(0);
+ };
+
+ var reader = new TestReader();
+ setTimeout(function() {
+ t.equal(ondataCalled, 1);
+ });
+ });
+}
diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js
new file mode 100644
index 0000000..74be495
--- /dev/null
+++ b/test/browser/test-stream2-large-read-stall.js
@@ -0,0 +1,62 @@
+'use strict';
+var common = require('../common');
+module.exports = function (t) {
+ t.test('large object read stall', function (t) {
+
+// If everything aligns so that you do a read(n) of exactly the
+// remaining buffer, then make sure that 'end' still emits.
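+// here: 1000 pushes of 20 bytes (20000 total), drained in 100-byte reads.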
+
+ var READSIZE = 100;
+ var PUSHSIZE = 20;
+ var PUSHCOUNT = 1000;
+ var HWM = 50;
+
+ var Readable = require('../../').Readable;
+ var r = new Readable({
+ highWaterMark: HWM
+ });
+ var rs = r._readableState;
+
+ r._read = push;
+
+ r.on('readable', function() {
+ //console.error('>> readable');
+ do {
+ //console.error(' > read(%d)', READSIZE);
+ var ret = r.read(READSIZE);
+ //console.error(' < %j (%d remain)', ret && ret.length, rs.length);
+ } while (ret && ret.length === READSIZE);
+
+ //console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length);
+ });
+
+ var endEmitted = false;
+ r.on('end', function() {
+ t.equal(pushes, PUSHCOUNT + 1);
+ t.end();
+ //console.error('end');
+ });
+
+ var pushes = 0;
+ function push() {
+ if (pushes > PUSHCOUNT)
+ return;
+
+ if (pushes++ === PUSHCOUNT) {
+ //console.error(' push(EOF)');
+ return r.push(null);
+ }
+
+ //console.error(' push #%d', pushes);
+ if (r.push(new Buffer(PUSHSIZE)))
+ setTimeout(push);
+ }
+
+ // start the flow
+ var ret = r.read(0);
+
+ });
+}
diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js
new file mode 100644
index 0000000..26a038b
--- /dev/null
+++ b/test/browser/test-stream2-objects.js
@@ -0,0 +1,306 @@
+'use strict';
+var common = require('../common');
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
+
+module.exports = function (t) {
+
+
+
+ function toArray(callback) {
+ var stream = new Writable({ objectMode: true });
+ var list = [];
+ stream.write = function(chunk) {
+ list.push(chunk);
+ };
+
+ stream.end = function() {
+ callback(list);
+ };
+
+ return stream;
+ }
+
+ function fromArray(list) {
+ var r = new Readable({ objectMode: true });
+ r._read = noop;
+ forEach(list, function(chunk) {
+ r.push(chunk);
+ });
+ r.push(null);
+
+ return r;
+ }
+
+ function noop() {}
+
+ t.test('can read objects from stream', function(t) {
+ var r = fromArray([{ one: '1'}, { two: '2' }]);
+
+ var v1 = r.read();
+ var v2 = r.read();
+ var v3 = r.read();
+
+ t.deepEqual(v1, { one: '1' });
+ t.deepEqual(v2, { two: '2' });
+ t.deepEqual(v3, null);
+
+ t.end();
+ });
+
+ t.test('can pipe objects into stream', function(t) {
+ var r = fromArray([{ one: '1'}, { two: '2' }]);
+
+ r.pipe(toArray(function(list) {
+ t.deepEqual(list, [
+ { one: '1' },
+ { two: '2' }
+ ]);
+
+ t.end();
+ }));
+ });
+
+ t.test('read(n) is ignored', function(t) {
+ var r = fromArray([{ one: '1'}, { two: '2' }]);
+
+ var value = r.read(2);
+
+ t.deepEqual(value, { one: '1' });
+
+ t.end();
+ });
+
+ t.test('can read objects from _read (sync)', function(t) {
+ var r = new Readable({ objectMode: true });
+ var list = [{ one: '1'}, { two: '2' }];
+ r._read = function(n) {
+ var item = list.shift();
+ r.push(item || null);
+ };
+
+ r.pipe(toArray(function(list) {
+ t.deepEqual(list, [
+ { one: '1' },
+ { two: '2' }
+ ]);
+
+ t.end();
+ }));
+ });
+
+ t.test('can read objects from _read (async)', function(t) {
+ var r = new Readable({ objectMode: true });
+ var list = [{ one: '1'}, { two: '2' }];
+ r._read = function(n) {
+ var item = list.shift();
+ process.nextTick(function() {
+ r.push(item || null);
+ });
+ };
+
+ r.pipe(toArray(function(list) {
+ t.deepEqual(list, [
+ { one: '1' },
+ { two: '2' }
+ ]);
+
+ t.end();
+ }));
+ });
+
+ t.test('can read strings as objects', function(t) {
+ var r = new Readable({
+ objectMode: true
+ });
+ r._read = noop;
+ var list = ['one', 'two', 'three'];
+ forEach(list, function(str) {
+ r.push(str);
+ });
+ r.push(null);
+
+ r.pipe(toArray(function(array) {
+ t.deepEqual(array, list);
+
+ t.end();
+ }));
+ });
+
+ t.test('read(0) for object streams', function(t) {
+ var r = new Readable({
+ objectMode: true
+ });
+ r._read = noop;
+
+ r.push('foobar');
+ r.push(null);
+
+ var v = r.read(0);
+
+ r.pipe(toArray(function(array) {
+ t.deepEqual(array, ['foobar']);
+
+ t.end();
+ }));
+ });
+
+ t.test('falsey values', function(t) {
+ var r = new Readable({
+ objectMode: true
+ });
+ r._read = noop;
+
+ r.push(false);
+ r.push(0);
+ r.push('');
+ r.push(null);
+
+ r.pipe(toArray(function(array) {
+ t.deepEqual(array, [false, 0, '']);
+
+ t.end();
+ }));
+ });
+
+ t.test('high watermark _read', function(t) {
+ var r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ });
+ var calls = 0;
+ var list = ['1', '2', '3', '4', '5', '6', '7', '8'];
+
+ r._read = function(n) {
+ calls++;
+ };
+
+ forEach(list, function(c) {
+ r.push(c);
+ });
+
+ var v = r.read();
+
+ t.equal(calls, 0);
+ t.equal(v, '1');
+
+ var v2 = r.read();
+ t.equal(v2, '2');
+
+ var v3 = r.read();
+ t.equal(v3, '3');
+
+ t.equal(calls, 1);
+
+ t.end();
+ });
+
+ t.test('high watermark push', function(t) {
+ var r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ });
+ r._read = function(n) {};
+ for (var i = 0; i < 6; i++) {
+ var bool = r.push(i);
+ t.equal(bool, i === 5 ? false : true);
+ }
+
+ t.end();
+ });
+
+ t.test('can write objects to stream', function(t) {
+ var w = new Writable({ objectMode: true });
+
+ w._write = function(chunk, encoding, cb) {
+ t.deepEqual(chunk, { foo: 'bar' });
+ cb();
+ };
+
+ w.on('finish', function() {
+ t.end();
+ });
+
+ w.write({ foo: 'bar' });
+ w.end();
+ });
+
+ t.test('can write multiple objects to stream', function(t) {
+ var w = new Writable({ objectMode: true });
+ var list = [];
+
+ w._write = function(chunk, encoding, cb) {
+ list.push(chunk);
+ cb();
+ };
+
+ w.on('finish', function() {
+ t.deepEqual(list, [0, 1, 2, 3, 4]);
+
+ t.end();
+ });
+
+ w.write(0);
+ w.write(1);
+ w.write(2);
+ w.write(3);
+ w.write(4);
+ w.end();
+ });
+
+ t.test('can write strings as objects', function(t) {
+ var w = new Writable({
+ objectMode: true
+ });
+ var list = [];
+
+ w._write = function(chunk, encoding, cb) {
+ list.push(chunk);
+ process.nextTick(cb);
+ };
+
+ w.on('finish', function() {
+ t.deepEqual(list, ['0', '1', '2', '3', '4']);
+
+ t.end();
+ });
+
+ w.write('0');
+ w.write('1');
+ w.write('2');
+ w.write('3');
+ w.write('4');
+ w.end();
+ });
+
+ t.test('buffers finish until cb is called', function(t) {
+ var w = new Writable({
+ objectMode: true
+ });
+ var called = false;
+
+ w._write = function(chunk, encoding, cb) {
+ t.equal(chunk, 'foo');
+
+ process.nextTick(function() {
+ called = true;
+ cb();
+ });
+ };
+
+ w.on('finish', function() {
+ t.equal(called, true);
+
+ t.end();
+ });
+
+ w.write('foo');
+ w.end();
+ });
+
+ function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+ }
+};
diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js
new file mode 100644
index 0000000..dc91cc3
--- /dev/null
+++ b/test/browser/test-stream2-pipe-error-handling.js
@@ -0,0 +1,88 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var stream = require('../../');
+module.exports = function (t) {
+ t.test('Error Listener Catches', function (t) {
+ var count = 1000;
+
+ var source = new stream.Readable();
+ source._read = function(n) {
+ n = Math.min(count, n);
+ count -= n;
+ source.push(new Buffer(n));
+ };
+
+ var unpipedDest;
+ source.unpipe = function(dest) {
+ unpipedDest = dest;
+ stream.Readable.prototype.unpipe.call(this, dest);
+ };
+
+ var dest = new stream.Writable();
+ dest._write = function(chunk, encoding, cb) {
+ cb();
+ };
+
+ source.pipe(dest);
+
+ var gotErr = null;
+ dest.on('error', function(err) {
+ gotErr = err;
+ });
+
+ var unpipedSource;
+ dest.on('unpipe', function(src) {
+ unpipedSource = src;
+ });
+
+ var err = new Error('This stream turned into bacon.');
+ dest.emit('error', err);
+ t.strictEqual(gotErr, err);
+ t.strictEqual(unpipedSource, source);
+ t.strictEqual(unpipedDest, dest);
+ t.end();
+ });
+
+ t.test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) {
+ var count = 1000;
+
+ var source = new stream.Readable();
+ source._read = function(n) {
+ n = Math.min(count, n);
+ count -= n;
+ source.push(new Buffer(n));
+ };
+
+ var unpipedDest;
+ source.unpipe = function(dest) {
+ unpipedDest = dest;
+ stream.Readable.prototype.unpipe.call(this, dest);
+ };
+
+ var dest = new stream.Writable();
+ dest._write = function(chunk, encoding, cb) {
+ cb();
+ };
+
+ source.pipe(dest);
+
+ var unpipedSource;
+ dest.on('unpipe', function(src) {
+ unpipedSource = src;
+ });
+
+ var err = new Error('This stream turned into bacon.');
+
+ var gotErr = null;
+ try {
+ dest.emit('error', err);
+ } catch (e) {
+ gotErr = e;
+ }
+ t.strictEqual(gotErr, err);
+ t.strictEqual(unpipedSource, source);
+ t.strictEqual(unpipedDest, dest);
+ t.end();
+ });
+}
diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js
new file mode 100644
index 0000000..5f4a4e2
--- /dev/null
+++ b/test/browser/test-stream2-pipe-error-once-listener.js
@@ -0,0 +1,41 @@
+'use strict';
+var common = require('../common');
+
+var inherits = require('inherits');
+var stream = require('../../');
+
+module.exports = function (t) {
+ t.test('pipe error once listener', function (t){
+ t.plan(1);
+ var Read = function() {
+ stream.Readable.call(this);
+ };
+ inherits(Read, stream.Readable);
+
+ Read.prototype._read = function(size) {
+ this.push('x');
+ this.push(null);
+ };
+
+
+ var Write = function() {
+ stream.Writable.call(this);
+ };
+ inherits(Write, stream.Writable);
+
+ Write.prototype._write = function(buffer, encoding, cb) {
+ this.emit('error', new Error('boom'));
+ this.emit('alldone');
+ };
+
+ var read = new Read();
+ var write = new Write();
+
+ write.once('error', function(err) {});
+ write.once('alldone', function(err) {
+ t.ok(true);
+ });
+
+ read.pipe(write);
+ });
+}
diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js
new file mode 100644
index 0000000..7ca5f39
--- /dev/null
+++ b/test/browser/test-stream2-push.js
@@ -0,0 +1,120 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+var Readable = stream.Readable;
+var Writable = stream.Writable;
+
+
+var inherits = require('inherits');
+var EE = require('events').EventEmitter;
+module.exports = function (t) {
+
+// a mock thing a bit like the net.Socket/tcp_wrap.handle interaction
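+// the EE source stands in for the handle: its 'data' events feed
+// stream.push(), and a false return from push() triggers readStop().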
+ t.test('push', function (t) {
+ var stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+ });
+
+ var source = new EE();
+
+ stream._read = function() {
+ //console.error('stream._read');
+ readStart();
+ };
+
+ var ended = false;
+ stream.on('end', function() {
+ ended = true;
+ });
+
+ source.on('data', function(chunk) {
+ var ret = stream.push(chunk);
+ //console.error('data', stream._readableState.length);
+ if (!ret)
+ readStop();
+ });
+
+ source.on('end', function() {
+ stream.push(null);
+ });
+
+ var reading = false;
+
+ function readStart() {
+ //console.error('readStart');
+ reading = true;
+ }
+
+ function readStop() {
+ //console.error('readStop');
+ reading = false;
+ process.nextTick(function() {
+ var r = stream.read();
+ if (r !== null)
+ writer.write(r);
+ });
+ }
+
+ var writer = new Writable({
+ decodeStrings: false
+ });
+
+ var written = [];
+
+ var expectWritten =
+ [ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg' ];
+
+ writer._write = function(chunk, encoding, cb) {
+ //console.error('WRITE %s', chunk);
+ written.push(chunk);
+ process.nextTick(cb);
+ };
+
+ writer.on('finish', finish);
+
+
+ // now emit some chunks.
+
+ var chunk = 'asdfg';
+
+ var set = 0;
+ readStart();
+ data();
+ function data() {
+ t.ok(reading);
+ source.emit('data', chunk);
+ t.ok(reading);
+ source.emit('data', chunk);
+ t.ok(reading);
+ source.emit('data', chunk);
+ t.ok(reading);
+ source.emit('data', chunk);
+ t.notOk(reading);
+ if (set++ < 5)
+ setTimeout(data, 10);
+ else
+ end();
+ }
+
+ function finish() {
+ //console.error('finish');
+ t.deepEqual(written, expectWritten);
+ t.end();
+ }
+
+ function end() {
+ source.emit('end');
+ t.notOk(reading);
+ writer.end(stream.read());
+ setTimeout(function() {
+ t.ok(ended);
+ });
+ }
+ });
+};
diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js
new file mode 100644
index 0000000..04c622e
--- /dev/null
+++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js
@@ -0,0 +1,91 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../').Readable;
+
+module.exports = function (t) {
+ t.test('readable empty buffer no eof 1', function (t) {
+ t.plan(1);
+ var r = new Readable();
+
+ // should not end when we get a Buffer(0) or '' as the _read result;
+ // that just means that there is *temporarily* no data, and we should
+ // go ahead and try again later.
+ //
+ // note that this is very unusual. it only works for crypto streams
+ // because the other side of the stream will call read(0) to cycle
+ // data through openssl. that's why we set the timeouts to call
+ // r.read(0) again later, otherwise there is no more work being done
+ // and the process just exits.
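+ // the r.read(0) calls below re-arm _read without consuming any data.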
+
+ var buf = new Buffer(5);
+ buf.fill('x');
+ var reads = 5;
+ r._read = function(n) {
+ switch (reads--) {
+ case 0:
+ return r.push(null); // EOF
+ case 1:
+ return r.push(buf);
+ case 2:
+ setTimeout(r.read.bind(r, 0), 50);
+ return r.push(new Buffer(0)); // Not-EOF!
+ case 3:
+ setTimeout(r.read.bind(r, 0), 50);
+ return process.nextTick(function() {
+ return r.push(new Buffer(0));
+ });
+ case 4:
+ setTimeout(r.read.bind(r, 0), 50);
+ return setTimeout(function() {
+ return r.push(new Buffer(0));
+ });
+ case 5:
+ return setTimeout(function() {
+ return r.push(buf);
+ });
+ default:
+ throw new Error('unreachable');
+ }
+ };
+
+ var results = [];
+ function flow() {
+ var chunk;
+ while (null !== (chunk = r.read()))
+ results.push(chunk + '');
+ }
+ r.on('readable', flow);
+ r.on('end', function() {
+ results.push('EOF');
+ t.deepEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]);
+ });
+ flow();
+
+ });
+
+ t.test('readable empty buffer no eof 2', function (t) {
+ t.plan(1);
+ var r = new Readable({ encoding: 'base64' });
+ var reads = 5;
+ r._read = function(n) {
+ if (!reads--)
+ return r.push(null); // EOF
+ else
+ return r.push(new Buffer('x'));
+ };
+
+ var results = [];
+ function flow() {
+ var chunk;
+ while (null !== (chunk = r.read()))
+ results.push(chunk + '');
+ }
+ r.on('readable', flow);
+ r.on('end', function() {
+ results.push('EOF');
+ t.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]);
+ });
+ flow();
+ });
+}
diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js
new file mode 100644
index 0000000..6249266
--- /dev/null
+++ b/test/browser/test-stream2-readable-from-list.js
@@ -0,0 +1,66 @@
+'use strict';
+var common = require('../common');
+var fromList = require('../../lib/_stream_readable')._fromList;
+
+
+module.exports = function (t) {
+ t.test('buffers', function(t) {
+ // have a length
+ var len = 16;
+ var list = [ new Buffer('foog'),
+ new Buffer('bark'),
+ new Buffer('bazy'),
+ new Buffer('kuel') ];
+
+ // read more than the first element.
+ var ret = fromList(6, { buffer: list, length: 16 });
+ t.equal(ret.toString(), 'foogba');
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10 });
+ t.equal(ret.toString(), 'rk');
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8 });
+ t.equal(ret.toString(), 'ba');
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6 });
+ t.equal(ret.toString(), 'zykuel');
+
+ // all consumed.
+ t.same(list, []);
+
+ t.end();
+ });
+
+ t.test('strings', function(t) {
+ // have a length
+ var len = 16;
+ var list = [ 'foog',
+ 'bark',
+ 'bazy',
+ 'kuel' ];
+
+ // read more than the first element.
+ var ret = fromList(6, { buffer: list, length: 16, decoder: true });
+ t.equal(ret, 'foogba');
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10, decoder: true });
+ t.equal(ret, 'rk');
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8, decoder: true });
+ t.equal(ret, 'ba');
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6, decoder: true });
+ t.equal(ret, 'zykuel');
+
+ // all consumed.
+ t.same(list, []);
+
+ t.end();
+ });
+}
diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js
new file mode 100644
index 0000000..7abfbc0
--- /dev/null
+++ b/test/browser/test-stream2-readable-legacy-drain.js
@@ -0,0 +1,52 @@
+'use strict';
+var common = require('../common');
+
+var Stream = require('../../');
+var Readable = require('../../').Readable;
+module.exports = function (t) {
+ t.test('readable legacy drain', function (t) {
+ var r = new Readable();
+ var N = 256;
+ var reads = 0;
+ r._read = function(n) {
+ return r.push(++reads === N ? null : new Buffer(1));
+ };
+ t.plan(2);
+ r.on('end', function() {
+ t.ok(true, 'rended');
+ });
+
+ var w = new Stream();
+ w.writable = true;
+ var writes = 0;
+ var buffered = 0;
+ w.write = function(c) {
+ writes += c.length;
+ buffered += c.length;
+ process.nextTick(drain);
+ return false;
+ };
+
+ function drain() {
+ if (buffered > 3) {
+ t.ok(false, 'too much buffer');
+ }
+ buffered = 0;
+ w.emit('drain');
+ }
+
+
+ w.end = function() {
+ t.ok(true, 'wended');
+ };
+
+ // Just for kicks, let's mess with the drain count.
+ // This verifies that even if it gets negative in the
+ // pipe() cleanup function, we'll still function properly.
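+ // every 'readable' fires an extra 'drain', which can push that count
+ // negative.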
+ r.on('readable', function() {
+ w.emit('drain');
+ });
+
+ r.pipe(w);
+});
+}
diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js
new file mode 100644
index 0000000..14cf6bb
--- /dev/null
+++ b/test/browser/test-stream2-readable-non-empty-end.js
@@ -0,0 +1,57 @@
+'use strict';
+var common = require('../common');
+var Readable = require('../../lib/_stream_readable');
+module.exports = function (t) {
+ t.test('non empty end', function (t) {
+ t.plan(4);
+ var len = 0;
+ var chunks = new Array(10);
+ for (var i = 1; i <= 10; i++) {
+ chunks[i - 1] = new Buffer(i);
+ len += i;
+ }
+
+ var test = new Readable();
+ var n = 0;
+ test._read = function(size) {
+ var chunk = chunks[n++];
+ setTimeout(function() {
+ test.push(chunk === undefined ? null : chunk);
+ });
+ };
+
+ test.on('end', thrower);
+ function thrower() {
+ throw new Error('this should not happen!');
+ }
+
+ var bytesread = 0;
+ test.on('readable', function() {
+ var b = len - bytesread - 1;
+ var res = test.read(b);
+ if (res) {
+ bytesread += res.length;
+ //console.error('br=%d len=%d', bytesread, len);
+ setTimeout(next);
+ }
+ test.read(0);
+ });
+ test.read(0);
+
+ function next() {
+ // now let's make 'end' happen
+ test.removeListener('end', thrower);
+
+ test.on('end', function() {
+ t.ok(true, 'end emitted');
+ });
+
+ // one to get the last byte
+ var r = test.read();
+ t.ok(r);
+ t.equal(r.length, 1);
+ r = test.read();
+ t.equal(r, null);
+ }
+ });
+}
diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js
new file mode 100644
index 0000000..d13bbba
--- /dev/null
+++ b/test/browser/test-stream2-readable-wrap-empty.js
@@ -0,0 +1,24 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../lib/_stream_readable');
+var EE = require('events').EventEmitter;
+module.exports = function (t) {
+ t.test('wrap empty', function (t) {
+ t.plan(1);
+ var oldStream = new EE();
+ oldStream.pause = function() {};
+ oldStream.resume = function() {};
+
+ var newStream = new Readable().wrap(oldStream);
+
+ newStream
+ .on('readable', function() {})
+ .on('end', function() {
+ t.ok(true, 'ended');
+ });
+
+ oldStream.emit('end');
+
+ })
+}
diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js
new file mode 100644
index 0000000..04f12b8
--- /dev/null
+++ b/test/browser/test-stream2-readable-wrap.js
@@ -0,0 +1,86 @@
+'use strict';
+var common = require('../common');
+
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
+var EE = require('events').EventEmitter;
+var run = 0;
+function runTest(t, highWaterMark, objectMode, produce) {
+ t.test('run #' + (++run), function (t) {
+ var old = new EE();
+ var r = new Readable({ highWaterMark: highWaterMark,
+ objectMode: objectMode });
+ t.equal(r, r.wrap(old));
+
+ var ended = false;
+ r.on('end', function() {
+ ended = true;
+ });
+
+ old.pause = function() {
+ //console.error('old.pause()');
+ old.emit('pause');
+ flowing = false;
+ };
+
+ old.resume = function() {
+ //console.error('old.resume()');
+ old.emit('resume');
+ flow();
+ };
+
+ var flowing;
+ var chunks = 10;
+ var oldEnded = false;
+ var expected = [];
+ function flow() {
+ flowing = true;
+ while (flowing && chunks-- > 0) {
+ var item = produce();
+ expected.push(item);
+ //console.log('old.emit', chunks, flowing);
+ old.emit('data', item);
+ //console.log('after emit', chunks, flowing);
+ }
+ if (chunks <= 0) {
+ oldEnded = true;
+ //console.log('old end', chunks, flowing);
+ old.emit('end');
+ }
+ }
+
+ var w = new Writable({ highWaterMark: highWaterMark * 2,
+ objectMode: objectMode });
+ var written = [];
+ w._write = function(chunk, encoding, cb) {
+ //console.log('_write', chunk);
+ written.push(chunk);
+ setTimeout(cb);
+ };
+
+ w.on('finish', function() {
+ performAsserts();
+ });
+
+ r.pipe(w);
+
+ flow();
+
+ function performAsserts() {
+ t.ok(ended);
+ t.ok(oldEnded);
+ t.deepEqual(written, expected);
+ t.end();
+ }
+ });
+}
+module.exports = function (t) {
+ t.test('readable wrap', function (t) {
+ runTest(t, 100, false, function() { return new Buffer(100); });
+ runTest(t, 10, false, function() { return new Buffer('xxxxxxxxxx'); });
+ runTest(t, 1, true, function() { return { foo: 'bar' }; });
+
+ var objectChunks = [ 5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555 ];
+ runTest(t, 1, true, function() { return objectChunks.shift(); });
+ });
+}
diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js
new file mode 100644
index 0000000..b174f30
--- /dev/null
+++ b/test/browser/test-stream2-set-encoding.js
@@ -0,0 +1,317 @@
+'use strict';
+var common = require('../common');
+var R = require('../../lib/_stream_readable');
+var util = {
+ inherits: require('inherits')
+};
+
+// tiny node-tap lookalike.
+module.exports = function (t) {
+ var test = t.test;
+ /////
+
+ util.inherits(TestReader, R);
+
+ function TestReader(n, opts) {
+ R.call(this, opts);
+
+ this.pos = 0;
+ this.len = n || 100;
+ }
+
+ TestReader.prototype._read = function(n) {
+ setTimeout(function() {
+
+ if (this.pos >= this.len) {
+ // double push(null) to test eos handling
+ this.push(null);
+ return this.push(null);
+ }
+
+ n = Math.min(n, this.len - this.pos);
+ if (n <= 0) {
+ // double push(null) to test eos handling
+ this.push(null);
+ return this.push(null);
+ }
+
+ this.pos += n;
+ var ret = new Buffer(n);
+ ret.fill('a');
+
+ //console.log('this.push(ret)', ret);
+
+ return this.push(ret);
+ }.bind(this), 1);
+ };
+
+ test('setEncoding utf8', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('utf8');
+ var out = [];
+ var expect =
+ [ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+
+ test('setEncoding hex', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('hex');
+ var out = [];
+ var expect =
+ [ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+ test('setEncoding hex with read(13)', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('hex');
+ var out = [];
+ var expect =
+ [ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161' ];
+
+ tr.on('readable', function flow() {
+ //console.log('readable once');
+ var chunk;
+ while (null !== (chunk = tr.read(13)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ //console.log('END');
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+ test('setEncoding base64', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('base64');
+ var out = [];
+ var expect =
+ [ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ==' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+ test('encoding: utf8', function(t) {
+ var tr = new TestReader(100, { encoding: 'utf8' });
+ var out = [];
+ var expect =
+ [ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+
+ test('encoding: hex', function(t) {
+ var tr = new TestReader(100, { encoding: 'hex' });
+ var out = [];
+ var expect =
+ [ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+ test('encoding: hex with read(13)', function(t) {
+ var tr = new TestReader(100, { encoding: 'hex' });
+ var out = [];
+ var expect =
+ [ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(13)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+ test('encoding: base64', function(t) {
+ var tr = new TestReader(100, { encoding: 'base64' });
+ var out = [];
+ var expect =
+ [ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ==' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+ });
+
+ test('chainable', function(t) {
+ var tr = new TestReader(100);
+ t.equal(tr.setEncoding('utf8'), tr);
+ t.end();
+ });
+}
diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js
new file mode 100644
index 0000000..921c951
--- /dev/null
+++ b/test/browser/test-stream2-transform.js
@@ -0,0 +1,473 @@
+'use strict';
+var common = require('../common');
+var PassThrough = require('../../lib/_stream_passthrough');
+var Transform = require('../../lib/_stream_transform');
+
+/////
+module.exports = function (t) {
+ t.test('writable side consumption', function(t) {
+ var tx = new Transform({
+ highWaterMark: 10
+ });
+
+ var transformed = 0;
+ tx._transform = function(chunk, encoding, cb) {
+ transformed += chunk.length;
+ tx.push(chunk);
+ cb();
+ };
+
+ for (var i = 1; i <= 10; i++) {
+ tx.write(new Buffer(i));
+ }
+ tx.end();
+
+ t.equal(tx._readableState.length, 10);
+ t.equal(transformed, 10);
+ t.equal(tx._transformState.writechunk.length, 5);
+ t.same(tx._writableState.getBuffer().map(function(c) {
+ return c.chunk.length;
+ }), [6, 7, 8, 9, 10]);
+
+ t.end();
+ });
+
+ t.test('passthrough', function(t) {
+ var pt = new PassThrough();
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5).toString(), 'arkba');
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5).toString(), 'l');
+ t.end();
+ });
+
+ t.test('object passthrough', function(t) {
+ var pt = new PassThrough({ objectMode: true });
+
+ pt.write(1);
+ pt.write(true);
+ pt.write(false);
+ pt.write(0);
+ pt.write('foo');
+ pt.write('');
+ pt.write({ a: 'b'});
+ pt.end();
+
+ t.equal(pt.read(), 1);
+ t.equal(pt.read(), true);
+ t.equal(pt.read(), false);
+ t.equal(pt.read(), 0);
+ t.equal(pt.read(), 'foo');
+ t.equal(pt.read(), '');
+ t.same(pt.read(), { a: 'b'});
+ t.end();
+ });
+
+ t.test('simple transform', function(t) {
+ var pt = new Transform();
+ pt._transform = function(c, e, cb) {
+ var ret = new Buffer(c.length);
+ ret.fill('x');
+ pt.push(ret);
+ cb();
+ };
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ t.equal(pt.read(5).toString(), 'xxxxx');
+ t.equal(pt.read(5).toString(), 'xxxxx');
+ t.equal(pt.read(5).toString(), 'xxxxx');
+ t.equal(pt.read(5).toString(), 'x');
+ t.end();
+ });
+
+ t.test('simple object transform', function(t) {
+ var pt = new Transform({ objectMode: true });
+ pt._transform = function(c, e, cb) {
+ pt.push(JSON.stringify(c));
+ cb();
+ };
+
+ pt.write(1);
+ pt.write(true);
+ pt.write(false);
+ pt.write(0);
+ pt.write('foo');
+ pt.write('');
+ pt.write({ a: 'b'});
+ pt.end();
+
+ t.equal(pt.read(), '1');
+ t.equal(pt.read(), 'true');
+ t.equal(pt.read(), 'false');
+ t.equal(pt.read(), '0');
+ t.equal(pt.read(), '"foo"');
+ t.equal(pt.read(), '""');
+ t.equal(pt.read(), '{"a":"b"}');
+ t.end();
+ });
+
+ t.test('async passthrough', function(t) {
+ var pt = new Transform();
+ pt._transform = function(chunk, encoding, cb) {
+ setTimeout(function() {
+ pt.push(chunk);
+ cb();
+ }, 10);
+ };
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ pt.on('finish', function() {
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5).toString(), 'arkba');
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5).toString(), 'l');
+ t.end();
+ });
+ });
+
+ t.test('asymmetric transform (expand)', function(t) {
+ var pt = new Transform();
+
+ // emit each chunk 2 times.
+ pt._transform = function(chunk, encoding, cb) {
+ setTimeout(function() {
+ pt.push(chunk);
+ setTimeout(function() {
+ pt.push(chunk);
+ cb();
+ }, 10);
+ }, 10);
+ };
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ pt.on('finish', function() {
+ t.equal(pt.read(5).toString(), 'foogf');
+ t.equal(pt.read(5).toString(), 'oogba');
+ t.equal(pt.read(5).toString(), 'rkbar');
+ t.equal(pt.read(5).toString(), 'kbazy');
+ t.equal(pt.read(5).toString(), 'bazyk');
+ t.equal(pt.read(5).toString(), 'uelku');
+ t.equal(pt.read(5).toString(), 'el');
+ t.end();
+ });
+ });
+
+ t.test('asymmetric transform (compress)', function(t) {
+ var pt = new Transform();
+
+ // each output is the first char of 3 consecutive chunks,
+ // or whatever's left.
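+ // e.g. writes of 'aaaa', 'bbbb', 'cccc' produce one output chunk 'abc'.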
+ pt.state = '';
+
+ pt._transform = function(chunk, encoding, cb) {
+ if (!chunk)
+ chunk = '';
+ var s = chunk.toString();
+ setTimeout(function() {
+ this.state += s.charAt(0);
+ if (this.state.length === 3) {
+ pt.push(new Buffer(this.state));
+ this.state = '';
+ }
+ cb();
+ }.bind(this), 10);
+ };
+
+ pt._flush = function(cb) {
+ // just output whatever we have.
+ pt.push(new Buffer(this.state));
+ this.state = '';
+ cb();
+ };
+
+ pt.write(new Buffer('aaaa'));
+ pt.write(new Buffer('bbbb'));
+ pt.write(new Buffer('cccc'));
+ pt.write(new Buffer('dddd'));
+ pt.write(new Buffer('eeee'));
+ pt.write(new Buffer('aaaa'));
+ pt.write(new Buffer('bbbb'));
+ pt.write(new Buffer('cccc'));
+ pt.write(new Buffer('dddd'));
+ pt.write(new Buffer('eeee'));
+ pt.write(new Buffer('aaaa'));
+ pt.write(new Buffer('bbbb'));
+ pt.write(new Buffer('cccc'));
+ pt.write(new Buffer('dddd'));
+ pt.end();
+
+ // 'abcdeabcdeabcd'
+ pt.on('finish', function() {
+ t.equal(pt.read(5).toString(), 'abcde');
+ t.equal(pt.read(5).toString(), 'abcde');
+ t.equal(pt.read(5).toString(), 'abcd');
+ t.end();
+ });
+ });
+
+ // this tests for a stall when data is written to a full stream
+ // that has empty transforms.
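+ // the second chunk is held back and only pushed along with the next one.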
+ t.test('complex transform', function(t) {
+ var count = 0;
+ var saved = null;
+ var pt = new Transform({highWaterMark:3});
+ pt._transform = function(c, e, cb) {
+ if (count++ === 1)
+ saved = c;
+ else {
+ if (saved) {
+ pt.push(saved);
+ saved = null;
+ }
+ pt.push(c);
+ }
+
+ cb();
+ };
+
+ pt.once('readable', function() {
+ process.nextTick(function() {
+ pt.write(new Buffer('d'));
+ pt.write(new Buffer('ef'), function() {
+ pt.end();
+ t.end();
+ });
+ t.equal(pt.read().toString(), 'abcdef');
+ t.equal(pt.read(), null);
+ });
+ });
+
+ pt.write(new Buffer('abc'));
+ });
+
+
+ t.test('passthrough event emission', function(t) {
+ var pt = new PassThrough();
+ var emits = 0;
+ pt.on('readable', function() {
+ var state = pt._readableState;
+ //console.error('>>> emit readable %d', emits);
+ emits++;
+ });
+
+ var i = 0;
+
+ pt.write(new Buffer('foog'));
+
+ //console.error('need emit 0');
+ pt.write(new Buffer('bark'));
+
+ //console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1);
+
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5) + '', 'null');
+
+ //console.error('need emit 1');
+
+ pt.write(new Buffer('bazy'));
+ //console.error('should have emitted, but not again');
+ pt.write(new Buffer('kuel'));
+
+ //console.error('should have emitted readable now 2 === %d', emits);
+ t.equal(emits, 2);
+
+ t.equal(pt.read(5).toString(), 'arkba');
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5), null);
+
+ //console.error('need emit 2');
+
+ pt.end();
+
+ t.equal(emits, 3);
+
+ t.equal(pt.read(5).toString(), 'l');
+ t.equal(pt.read(5), null);
+
+ //console.error('should not have emitted again');
+ t.equal(emits, 3);
+ t.end();
+ });
+
+ t.test('passthrough event emission reordered', function(t) {
+ var pt = new PassThrough();
+ var emits = 0;
+ pt.on('readable', function() {
+ //console.error('emit readable', emits);
+ emits++;
+ });
+
+ pt.write(new Buffer('foog'));
+ //console.error('need emit 0');
+ pt.write(new Buffer('bark'));
+ //console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1);
+
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5), null);
+
+ //console.error('need emit 1');
+ pt.once('readable', function() {
+ t.equal(pt.read(5).toString(), 'arkba');
+
+ t.equal(pt.read(5), null);
+
+ //console.error('need emit 2');
+ pt.once('readable', function() {
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5), null);
+ pt.once('readable', function() {
+ t.equal(pt.read(5).toString(), 'l');
+ t.equal(pt.read(5), null);
+ t.equal(emits, 4);
+ t.end();
+ });
+ pt.end();
+ });
+ pt.write(new Buffer('kuel'));
+ });
+
+ pt.write(new Buffer('bazy'));
+ });
+
+ t.test('passthrough facaded', function(t) {
+ //console.error('passthrough facaded');
+ var pt = new PassThrough();
+ var datas = [];
+ pt.on('data', function(chunk) {
+ datas.push(chunk.toString());
+ });
+
+ pt.on('end', function() {
+ t.same(datas, ['foog', 'bark', 'bazy', 'kuel']);
+ t.end();
+ });
+
+ pt.write(new Buffer('foog'));
+ setTimeout(function() {
+ pt.write(new Buffer('bark'));
+ setTimeout(function() {
+ pt.write(new Buffer('bazy'));
+ setTimeout(function() {
+ pt.write(new Buffer('kuel'));
+ setTimeout(function() {
+ pt.end();
+ }, 10);
+ }, 10);
+ }, 10);
+ }, 10);
+ });
+
+ t.test('object transform (json parse)', function(t) {
+ //console.error('json parse stream');
+ var jp = new Transform({ objectMode: true });
+ jp._transform = function(data, encoding, cb) {
+ try {
+ jp.push(JSON.parse(data));
+ cb();
+ } catch (er) {
+ cb(er);
+ }
+ };
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ var objects = [
+ { foo: 'bar' },
+ 100,
+ 'string',
+ { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
+ ];
+
+ var ended = false;
+ jp.on('end', function() {
+ ended = true;
+ });
+
+ forEach(objects, function(obj) {
+ jp.write(JSON.stringify(obj));
+ var res = jp.read();
+ t.same(res, obj);
+ });
+
+ jp.end();
+ // read one more time to get the 'end' event
+ jp.read();
+
+ process.nextTick(function() {
+ t.ok(ended);
+ t.end();
+ });
+ });
+
+ t.test('object transform (json stringify)', function(t) {
+ //console.error('json parse stream');
+ var js = new Transform({ objectMode: true });
+ js._transform = function(data, encoding, cb) {
+ try {
+ js.push(JSON.stringify(data));
+ cb();
+ } catch (er) {
+ cb(er);
+ }
+ };
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ var objects = [
+ { foo: 'bar' },
+ 100,
+ 'string',
+ { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
+ ];
+
+ var ended = false;
+ js.on('end', function() {
+ ended = true;
+ });
+
+ forEach(objects, function(obj) {
+ js.write(obj);
+ var res = js.read();
+ t.equal(res, JSON.stringify(obj));
+ });
+
+ js.end();
+ // read one more time to get the 'end' event
+ js.read();
+
+ process.nextTick(function() {
+ t.ok(ended);
+ t.end();
+ });
+ });
+
+ function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+ }
+};
diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js
new file mode 100644
index 0000000..5fff953
--- /dev/null
+++ b/test/browser/test-stream2-unpipe-drain.js
@@ -0,0 +1,65 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+
+var crypto = require('crypto');
+
+var inherits = require('inherits');
+module.exports = function (t) {
+ t.test('unpipe drain', function (t) {
+ try {
+ crypto.randomBytes(9);
+ } catch (_) {
+ t.ok(true, 'does not support random, skipping');
+ return t.end();
+ }
+ function TestWriter() {
+ stream.Writable.call(this);
+ }
+ inherits(TestWriter, stream.Writable);
+
+ TestWriter.prototype._write = function(buffer, encoding, callback) {
+ //console.log('write called');
+ // super slow write stream (callback never called)
+ };
+
+ var dest = new TestWriter();
+
+ function TestReader(id) {
+ stream.Readable.call(this);
+ this.reads = 0;
+ }
+ inherits(TestReader, stream.Readable);
+
+ TestReader.prototype._read = function(size) {
+ this.reads += 1;
+ this.push(crypto.randomBytes(size));
+ };
+
+ var src1 = new TestReader();
+ var src2 = new TestReader();
+
+ src1.pipe(dest);
+
+ src1.once('readable', function() {
+ process.nextTick(function() {
+
+ src2.pipe(dest);
+
+ src2.once('readable', function() {
+ process.nextTick(function() {
+
+ src1.unpipe(dest);
+ });
+ });
+ });
+ });
+
+
+ dest.on('unpipe', function() {
+ t.equal(src1.reads, 2);
+ t.equal(src2.reads, 2);
+ t.end();
+ });
+ });
+}
diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js
new file mode 100644
index 0000000..6b43aef
--- /dev/null
+++ b/test/browser/test-stream2-writable.js
@@ -0,0 +1,375 @@
+'use strict';
+var common = require('../common');
+var W = require('../../lib/_stream_writable');
+var D = require('../../lib/_stream_duplex');
+
+var inherits = require('inherits');
+inherits(TestWriter, W);
+
+function TestWriter() {
+ W.apply(this, arguments);
+ this.buffer = [];
+ this.written = 0;
+}
+
+TestWriter.prototype._write = function(chunk, encoding, cb) {
+ // simulate a small unpredictable latency
+ setTimeout(function() {
+ this.buffer.push(chunk.toString());
+ this.written += chunk.length;
+ cb();
+ }.bind(this), Math.floor(Math.random() * 10));
+};
+inherits(Processstdout, W);
+
+function Processstdout() {
+ W.apply(this, arguments);
+ this.buffer = [];
+ this.written = 0;
+}
+
+Processstdout.prototype._write = function(chunk, encoding, cb) {
+ //console.log(chunk.toString());
+ cb();
+};
+var chunks = new Array(50);
+for (var i = 0; i < chunks.length; i++) {
+ chunks[i] = new Array(i + 1).join('x');
+}
+
+module.exports = function (t) {
+ var test = t.test;
+
+ if (!process.stdout) {
+ process.stdout = new Processstdout();
+ }
+
+ test('write fast', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.end();
+ });
+
+ forEach(chunks, function(chunk) {
+ // screw backpressure. Just buffer it all up.
+ tw.write(chunk);
+ });
+ tw.end();
+ });
+
+ test('write slow', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.end();
+ });
+
+ var i = 0;
+ (function W() {
+ tw.write(chunks[i++]);
+ if (i < chunks.length)
+ setTimeout(W, 10);
+ else
+ tw.end();
+ })();
+ });
+
+ test('write backpressure', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 50
+ });
+
+ var drains = 0;
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.equal(drains, 17);
+ t.end();
+ });
+
+ tw.on('drain', function() {
+ drains++;
+ });
+
+ var i = 0;
+ (function W() {
+ do {
+ var ret = tw.write(chunks[i++]);
+ } while (ret !== false && i < chunks.length);
+
+ if (i < chunks.length) {
+ t.ok(tw._writableState.length >= 50);
+ tw.once('drain', W);
+ } else {
+ tw.end();
+ }
+ })();
+ });
+
+ test('write bufferize', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ var encodings =
+ [ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined ];
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got the expected chunks');
+ });
+
+ forEach(chunks, function(chunk, i) {
+ var enc = encodings[ i % encodings.length ];
+ chunk = new Buffer(chunk);
+ tw.write(chunk.toString(enc), enc);
+ });
+ t.end();
+ });
+
+ test('write no bufferize', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100,
+ decodeStrings: false
+ });
+
+ tw._write = function(chunk, encoding, cb) {
+ t.equals(typeof chunk, 'string');
+ chunk = new Buffer(chunk, encoding);
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb);
+ };
+
+ var encodings =
+ [ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined ];
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got the expected chunks');
+ });
+
+ forEach(chunks, function(chunk, i) {
+ var enc = encodings[ i % encodings.length ];
+ chunk = new Buffer(chunk);
+ tw.write(chunk.toString(enc), enc);
+ });
+ t.end();
+ });
+
+ test('write callbacks', function(t) {
+ var callbacks = chunks.map(function(chunk, i) {
+ return [i, function(er) {
+ callbacks._called[i] = chunk;
+ }];
+ }).reduce(function(set, x) {
+ set['callback-' + x[0]] = x[1];
+ return set;
+ }, {});
+ callbacks._called = [];
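+ // each callbacks['callback-<i>'] records chunk i into callbacks._called
+ // when its write callback fires, so order and completeness are checked.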
+
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ tw.on('finish', function() {
+ process.nextTick(function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.same(callbacks._called, chunks, 'called all callbacks');
+ t.end();
+ });
+ });
+
+ forEach(chunks, function(chunk, i) {
+ tw.write(chunk, callbacks['callback-' + i]);
+ });
+ tw.end();
+ });
+
+ test('end callback', function(t) {
+ var tw = new TestWriter();
+ tw.end(function() {
+ t.end();
+ });
+ });
+
+ test('end callback with chunk', function(t) {
+ var tw = new TestWriter();
+ tw.end(new Buffer('hello world'), function() {
+ t.end();
+ });
+ });
+
+ test('end callback with chunk and encoding', function(t) {
+ var tw = new TestWriter();
+ tw.end('hello world', 'ascii', function() {
+ t.end();
+ });
+ });
+
+ test('end callback after .write() call', function(t) {
+ var tw = new TestWriter();
+ tw.write(new Buffer('hello world'));
+ tw.end(function() {
+ t.end();
+ });
+ });
+
+ test('end callback called after write callback', function(t) {
+ var tw = new TestWriter();
+ var writeCalledback = false;
+ tw.write(new Buffer('hello world'), function() {
+ writeCalledback = true;
+ });
+ tw.end(function() {
+ t.equal(writeCalledback, true);
+ t.end();
+ });
+ });
+
+ test('encoding should be ignored for buffers', function(t) {
+ var tw = new W();
+ var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb';
+ tw._write = function(chunk, encoding, cb) {
+ t.equal(chunk.toString('hex'), hex);
+ t.end();
+ };
+ var buf = new Buffer(hex, 'hex');
+ tw.write(buf, 'binary');
+ });
+
+ test('writables are not pipable', function(t) {
+ var w = new W();
+ w._write = function() {};
+ var gotError = false;
+ w.on('error', function(er) {
+ gotError = true;
+ });
+ w.pipe(process.stdout);
+ t.ok(gotError);
+ t.end();
+ });
+
+ test('duplexes are pipable', function(t) {
+ var d = new D();
+ d._read = function() {};
+ d._write = function() {};
+ var gotError = false;
+ d.on('error', function(er) {
+ gotError = true;
+ });
+ d.pipe(process.stdout);
+ t.ok(!gotError);
+ t.end();
+ });
+
+ test('end(chunk) two times is an error', function(t) {
+ var w = new W();
+ w._write = function() {};
+ var gotError = false;
+ w.on('error', function(er) {
+ gotError = true;
+ t.equal(er.message, 'write after end');
+ });
+ w.end('this is the end');
+ w.end('and so is this');
+ process.nextTick(function() {
+ t.ok(gotError);
+ t.end();
+ });
+ });
+
+ test('dont end while writing', function(t) {
+ var w = new W();
+ var wrote = false;
+ w._write = function(chunk, e, cb) {
+ t.ok(!this.writing);
+ wrote = true;
+ this.writing = true;
+ setTimeout(function() {
+ this.writing = false;
+ cb();
+ }.bind(this));
+ };
+ w.on('finish', function() {
+ t.ok(wrote);
+ t.end();
+ });
+ w.write(Buffer(0));
+ w.end();
+ });
+
+ test('finish does not come before write cb', function(t) {
+ var w = new W();
+ var writeCb = false;
+ w._write = function(chunk, e, cb) {
+ setTimeout(function() {
+ writeCb = true;
+ cb();
+ }, 10);
+ };
+ w.on('finish', function() {
+ t.ok(writeCb);
+ t.end();
+ });
+ w.write(Buffer(0));
+ w.end();
+ });
+
+ test('finish does not come before sync _write cb', function(t) {
+ var w = new W();
+ var writeCb = false;
+ w._write = function(chunk, e, cb) {
+ cb();
+ };
+ w.on('finish', function() {
+ t.ok(writeCb);
+ t.end();
+ });
+ w.write(Buffer(0), function(er) {
+ writeCb = true;
+ });
+ w.end();
+ });
+
+ test('finish is emitted if last chunk is empty', function(t) {
+ var w = new W();
+ w._write = function(chunk, e, cb) {
+ process.nextTick(cb);
+ };
+ w.on('finish', function() {
+ t.end();
+ });
+ w.write(Buffer(1));
+ w.end(Buffer(0));
+ });
+
+ function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+ }
+}
diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js
new file mode 100644
index 0000000..c81e762
--- /dev/null
+++ b/test/browser/test-stream3-pause-then-read.js
@@ -0,0 +1,150 @@
+'use strict';
+var common = require('../common');
+
+var stream = require('../../');
+var Readable = stream.Readable;
+var Writable = stream.Writable;
+
+module.exports = function (t){
+ t.test('pause then read', function (t) {
+ var totalChunks = 100;
+ var chunkSize = 99;
+ var expectTotalData = totalChunks * chunkSize;
+ var expectEndingData = expectTotalData;
+
+ var r = new Readable({ highWaterMark: 1000 });
+ var chunks = totalChunks;
+ r._read = function(n) {
+ if (!(chunks % 2))
+ setImmediate(push);
+ else if (!(chunks % 3))
+ process.nextTick(push);
+ else
+ push();
+ };
+
+ var totalPushed = 0;
+ function push() {
+ var chunk = chunks-- > 0 ? new Buffer(chunkSize) : null;
+ if (chunk) {
+ totalPushed += chunk.length;
+ chunk.fill('x');
+ }
+ r.push(chunk);
+ }
+
+ read100();
+
+ // first we read 100 bytes
+ function read100() {
+ readn(100, onData);
+ }
+
+ function readn(n, then) {
+ //console.error('read %d', n);
+ expectEndingData -= n;
+ ;(function read() {
+ var c = r.read(n);
+ if (!c)
+ r.once('readable', read);
+ else {
+ t.equal(c.length, n);
+ t.notOk(r._readableState.flowing);
+ then();
+ }
+ })();
+ }
+
+ // then we listen to some data events
+ function onData() {
+ expectEndingData -= 100;
+ //console.error('onData');
+ var seen = 0;
+ r.on('data', function od(c) {
+ seen += c.length;
+ if (seen >= 100) {
+ // seen enough
+ r.removeListener('data', od);
+ r.pause();
+ if (seen > 100) {
+ // oh no, seen too much!
+ // put the extra back.
+ var diff = seen - 100;
+ r.unshift(c.slice(c.length - diff));
+ console.error('seen too much', seen, diff);
+ }
+
+ // Nothing should be lost in between
+ setImmediate(pipeLittle);
+ }
+ });
+ }
+
+ // Just pipe 200 bytes, then unshift the extra and unpipe
+ function pipeLittle() {
+ expectEndingData -= 200;
+ //console.error('pipe a little');
+ var w = new Writable();
+ var written = 0;
+ w.on('finish', function() {
+ t.equal(written, 200);
+ setImmediate(read1234);
+ });
+ w._write = function(chunk, encoding, cb) {
+ written += chunk.length;
+ if (written >= 200) {
+ r.unpipe(w);
+ w.end();
+ cb();
+ if (written > 200) {
+ var diff = written - 200;
+ written -= diff;
+ r.unshift(chunk.slice(chunk.length - diff));
+ }
+ } else {
+ setImmediate(cb);
+ }
+ };
+ r.pipe(w);
+ }
+
+ // now read 1234 more bytes
+ function read1234() {
+ readn(1234, resumePause);
+ }
+
+ function resumePause() {
+ //console.error('resumePause');
+ // don't read anything, just resume and re-pause a whole bunch
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ setImmediate(pipe);
+ }
+
+
+ function pipe() {
+ //console.error('pipe the rest');
+ var w = new Writable();
+ var written = 0;
+ w._write = function(chunk, encoding, cb) {
+ written += chunk.length;
+ cb();
+ };
+ w.on('finish', function() {
+ //console.error('written', written, totalPushed);
+ t.equal(written, expectEndingData);
+ t.equal(totalPushed, expectTotalData);
+ t.end();
+ });
+ r.pipe(w);
+ }
+ });
+}
diff --git a/test/common.js b/test/common.js
new file mode 100644
index 0000000..66d36eb
--- /dev/null
+++ b/test/common.js
@@ -0,0 +1,511 @@
+/*<replacement>*/
+if (!global.setImmediate) {
+ global.setImmediate = function setImmediate(fn) {
+ return setTimeout(fn.bind.apply(fn, arguments), 0);
+ };
+}
+if (!global.clearImmediate) {
+ global.clearImmediate = function clearImmediate(i) {
+ return clearTimeout(i);
+ };
+}
+/*</replacement>*/
+'use strict';
+
+/*<replacement>*/
+var objectKeys = objectKeys || function (obj) {
+ var keys = [];
+ for (var key in obj) keys.push(key);
+ return keys;
+};
+/*</replacement>*/
+
+var path = require('path');
+var fs = require('fs');
+var assert = require('assert');
+var os = require('os');
+var child_process = require('child_process');
+
+exports.testDir = path.dirname(__filename);
+exports.fixturesDir = path.join(exports.testDir, 'fixtures');
+exports.libDir = path.join(exports.testDir, '../lib');
+exports.tmpDirName = 'tmp';
+exports.PORT = +process.env.NODE_COMMON_PORT || 12346;
+exports.isWindows = process.platform === 'win32';
+exports.isAix = process.platform === 'aix';
+
+function rimrafSync(p) {
+ try {
+ var st = fs.lstatSync(p);
+ } catch (e) {
+ if (e.code === 'ENOENT')
+ return;
+ }
+
+ try {
+ if (st && st.isDirectory())
+ rmdirSync(p, null);
+ else
+ fs.unlinkSync(p);
+ } catch (e) {
+ if (e.code === 'ENOENT')
+ return;
+ if (e.code === 'EPERM')
+ return rmdirSync(p, e);
+ if (e.code !== 'EISDIR')
+ throw e;
+ rmdirSync(p, e);
+ }
+}
+
+function rmdirSync(p, originalEr) {
+ try {
+ fs.rmdirSync(p);
+ } catch (e) {
+ if (e.code === 'ENOTDIR')
+ throw originalEr;
+ if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST' || e.code === 'EPERM') {
+ forEach(fs.readdirSync(p), function(f) {
+ rimrafSync(path.join(p, f));
+ });
+ fs.rmdirSync(p);
+ }
+ }
+}
+
+exports.refreshTmpDir = function() {
+ rimrafSync(exports.tmpDir);
+ fs.mkdirSync(exports.tmpDir);
+};
+
+if (process.env.TEST_THREAD_ID) {
+ // Distribute ports in parallel tests
+ if (!process.env.NODE_COMMON_PORT)
+ exports.PORT += +process.env.TEST_THREAD_ID * 100;
+
+ exports.tmpDirName += '.' + process.env.TEST_THREAD_ID;
+}
+exports.tmpDir = path.join(exports.testDir, exports.tmpDirName);
+
+var opensslCli = null;
+var inFreeBSDJail = null;
+var localhostIPv4 = null;
+
+/*<replacement>*/if (!process.browser) {
+Object.defineProperty(exports, 'inFreeBSDJail', {
+ get: function() {
+ if (inFreeBSDJail !== null) return inFreeBSDJail;
+
+ if (process.platform === 'freebsd' &&
+ child_process.execSync('sysctl -n security.jail.jailed').toString() ===
+ '1\n') {
+ inFreeBSDJail = true;
+ } else {
+ inFreeBSDJail = false;
+ }
+ return inFreeBSDJail;
+ }
+});
+}/*</replacement>*/
+
+
+/*<replacement>*/if (!process.browser) {
+Object.defineProperty(exports, 'localhostIPv4', {
+ get: function() {
+ if (localhostIPv4 !== null) return localhostIPv4;
+
+ if (exports.inFreeBSDJail) {
+ // Jailed network interfaces are a bit special: detecting the address
+ // would mean jumping through hoops, and since this is an edge case,
+ // assume the user will provide it instead.
+ if (process.env.LOCALHOST) {
+ localhostIPv4 = process.env.LOCALHOST;
+ } else {
+ console.error('Looks like we\'re in a FreeBSD Jail. ' +
+ 'Please provide your default interface address ' +
+ 'as LOCALHOST or expect some tests to fail.');
+ }
+ }
+
+ if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1';
+
+ return localhostIPv4;
+ }
+});
+}/*</replacement>*/
+
+
+// opensslCli defined lazily to reduce overhead of spawnSync
+/*<replacement>*/if (!process.browser) {
+Object.defineProperty(exports, 'opensslCli', {get: function() {
+ if (opensslCli !== null) return opensslCli;
+
+ if (process.config.variables.node_shared_openssl) {
+ // use external command
+ opensslCli = 'openssl';
+ } else {
+ // use command built from sources included in Node.js repository
+ opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli');
+ }
+
+ if (exports.isWindows) opensslCli += '.exe';
+
+ var openssl_cmd = child_process.spawnSync(opensslCli, ['version']);
+ if (openssl_cmd.status !== 0 || openssl_cmd.error !== undefined) {
+ // openssl command cannot be executed
+ opensslCli = false;
+ }
+ return opensslCli;
+}, enumerable: true });
+}/*</replacement>*/
+
+
+/*<replacement>*/if (!process.browser) {
+Object.defineProperty(exports, 'hasCrypto', {get: function() {
+ return process.versions.openssl ? true : false;
+}});
+}/*</replacement>*/
+
+
+if (exports.isWindows) {
+ exports.PIPE = '\\\\.\\pipe\\libuv-test';
+} else {
+ exports.PIPE = exports.tmpDir + '/test.sock';
+}
+
+if (process.env.NODE_COMMON_PIPE) {
+ exports.PIPE = process.env.NODE_COMMON_PIPE;
+ // Remove it manually; the test runner won't do it
+ // for us as it does for files in test/tmp.
+ try {
+ fs.unlinkSync(exports.PIPE);
+ } catch (e) {
+ // Ignore.
+ }
+}
+
+if (exports.isWindows) {
+ exports.faketimeCli = false;
+} else {
+ exports.faketimeCli = path.join(__dirname, '..', 'tools', 'faketime', 'src',
+ 'faketime');
+}
+
+var ifaces = os.networkInterfaces();
+exports.hasIPv6 = objectKeys(ifaces).some(function(name) {
+ return /lo/.test(name) && ifaces[name].some(function(info) {
+ return info.family === 'IPv6';
+ });
+});
+
+var util = require('util');
+for (var i in util) exports[i] = util[i];
+//for (var i in exports) global[i] = exports[i];
+
+function protoCtrChain(o) {
+ var result = [];
+ for (; o; o = o.__proto__) { result.push(o.constructor); }
+ return result.join();
+}
+
+exports.indirectInstanceOf = function(obj, cls) {
+ if (obj instanceof cls) { return true; }
+ var clsChain = protoCtrChain(cls.prototype);
+ var objChain = protoCtrChain(obj);
+ return objChain.slice(-clsChain.length) === clsChain;
+};
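+// Editor's note: a minimal usage sketch for indirectInstanceOf (the Demo*
+// names are illustrative and never invoked by the suite). In-realm objects
+// hit the plain `instanceof` fast path; objects from another context fall
+// back to comparing the joined constructor chains built by protoCtrChain.
+function demoIndirectInstanceOf() {
+ function DemoBase() {}
+ function DemoChild() {}
+ DemoChild.prototype = Object.create(DemoBase.prototype);
+ DemoChild.prototype.constructor = DemoChild;
+ assert.ok(exports.indirectInstanceOf(new DemoChild(), DemoBase));
+}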
+
+
+exports.ddCommand = function(filename, kilobytes) {
+ if (exports.isWindows) {
+ var p = path.resolve(exports.fixturesDir, 'create-file.js');
+ return '"' + process.argv[0] + '" "' + p + '" "' +
+ filename + '" ' + (kilobytes * 1024);
+ } else {
+ return 'dd if=/dev/zero of="' + filename + '" bs=1024 count=' + kilobytes;
+ }
+};
+
+
+exports.spawnCat = function(options) {
+ var spawn = require('child_process').spawn;
+
+ if (exports.isWindows) {
+ return spawn('more', [], options);
+ } else {
+ return spawn('cat', [], options);
+ }
+};
+
+
+exports.spawnSyncCat = function(options) {
+ var spawnSync = require('child_process').spawnSync;
+
+ if (exports.isWindows) {
+ return spawnSync('more', [], options);
+ } else {
+ return spawnSync('cat', [], options);
+ }
+};
+
+
+exports.spawnPwd = function(options) {
+ var spawn = require('child_process').spawn;
+
+ if (exports.isWindows) {
+ return spawn('cmd.exe', ['/c', 'cd'], options);
+ } else {
+ return spawn('pwd', [], options);
+ }
+};
+
+exports.platformTimeout = function(ms) {
+ if (process.arch !== 'arm')
+ return ms;
+
+ if (process.config.variables.arm_version === '6')
+ return 7 * ms; // ARMv6
+
+ return 2 * ms; // ARMv7 and up.
+};
+
+var knownGlobals = [setTimeout,
+ setInterval,
+ setImmediate,
+ clearTimeout,
+ clearInterval,
+ clearImmediate,
+ console,
+ constructor, // Enumerable in V8 3.21.
+ Buffer,
+ process,
+ global];
+
+if (global.gc) {
+ knownGlobals.push(gc);
+}
+
+if (global.DTRACE_HTTP_SERVER_RESPONSE) {
+ knownGlobals.push(DTRACE_HTTP_SERVER_RESPONSE);
+ knownGlobals.push(DTRACE_HTTP_SERVER_REQUEST);
+ knownGlobals.push(DTRACE_HTTP_CLIENT_RESPONSE);
+ knownGlobals.push(DTRACE_HTTP_CLIENT_REQUEST);
+ knownGlobals.push(DTRACE_NET_STREAM_END);
+ knownGlobals.push(DTRACE_NET_SERVER_CONNECTION);
+}
+
+if (global.COUNTER_NET_SERVER_CONNECTION) {
+ knownGlobals.push(COUNTER_NET_SERVER_CONNECTION);
+ knownGlobals.push(COUNTER_NET_SERVER_CONNECTION_CLOSE);
+ knownGlobals.push(COUNTER_HTTP_SERVER_REQUEST);
+ knownGlobals.push(COUNTER_HTTP_SERVER_RESPONSE);
+ knownGlobals.push(COUNTER_HTTP_CLIENT_REQUEST);
+ knownGlobals.push(COUNTER_HTTP_CLIENT_RESPONSE);
+}
+
+if (global.LTTNG_HTTP_SERVER_RESPONSE) {
+ knownGlobals.push(LTTNG_HTTP_SERVER_RESPONSE);
+ knownGlobals.push(LTTNG_HTTP_SERVER_REQUEST);
+ knownGlobals.push(LTTNG_HTTP_CLIENT_RESPONSE);
+ knownGlobals.push(LTTNG_HTTP_CLIENT_REQUEST);
+ knownGlobals.push(LTTNG_NET_STREAM_END);
+ knownGlobals.push(LTTNG_NET_SERVER_CONNECTION);
+}
+
+/*<replacement>*/if (!process.browser) {
+if (global.ArrayBuffer) {
+ knownGlobals.push(ArrayBuffer);
+ knownGlobals.push(Int8Array);
+ knownGlobals.push(Uint8Array);
+ knownGlobals.push(Uint8ClampedArray);
+ knownGlobals.push(Int16Array);
+ knownGlobals.push(Uint16Array);
+ knownGlobals.push(Int32Array);
+ knownGlobals.push(Uint32Array);
+ knownGlobals.push(Float32Array);
+ knownGlobals.push(Float64Array);
+ knownGlobals.push(DataView);
+}
+}/*</replacement>*/
+
+
+// Harmony features.
+if (global.Proxy) {
+ knownGlobals.push(Proxy);
+}
+
+if (global.Symbol) {
+ knownGlobals.push(Symbol);
+}
+
+ /*<replacement>*/
+ if (typeof constructor == 'function')
+ knownGlobals.push(constructor);
+ if (typeof DTRACE_NET_SOCKET_READ == 'function')
+ knownGlobals.push(DTRACE_NET_SOCKET_READ);
+ if (typeof DTRACE_NET_SOCKET_WRITE == 'function')
+ knownGlobals.push(DTRACE_NET_SOCKET_WRITE);
+ /*</replacement>*/
+
+function leakedGlobals() {
+ var leaked = [];
+
+ for (var val in global)
+ if (-1 === knownGlobals.indexOf(global[val]))
+ leaked.push(val);
+
+ return leaked;
+}
+exports.leakedGlobals = leakedGlobals;
+
+// Turn this off if the test should not check for global leaks.
+exports.globalCheck = true;
+
+process.on('exit', function() {
+ if (!exports.globalCheck) return;
+ var leaked = leakedGlobals();
+ if (leaked.length > 0) {
+ console.error('Unknown globals: %s', leaked);
+ assert.ok(false, 'Unknown global found');
+ }
+});
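+// Editor's note: tests that deliberately create globals can opt out of the
+// leak check above by setting (hypothetical usage):
+//   common.globalCheck = false;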
+
+
+var mustCallChecks = [];
+
+
+function runCallChecks(exitCode) {
+ if (exitCode !== 0) return;
+
+ var failed = mustCallChecks.filter(function(context) {
+ return context.actual !== context.expected;
+ });
+
+ forEach(failed, function(context) {
+ console.log('Mismatched %s function calls. Expected %d, actual %d.',
+ context.name,
+ context.expected,
+ context.actual);
+ console.log(context.stack.split('\n').slice(2).join('\n'));
+ });
+
+ if (failed.length) process.exit(1);
+}
+
+
+exports.mustCall = function(fn, expected) {
+ if (typeof expected !== 'number') expected = 1;
+
+ var context = {
+ expected: expected,
+ actual: 0,
+ stack: (new Error()).stack,
+ name: fn.name || '<anonymous>'
+ };
+
+ // add the exit listener only once to avoid listener leak warnings
+ if (mustCallChecks.length === 0) process.on('exit', runCallChecks);
+
+ mustCallChecks.push(context);
+
+ return function() {
+ context.actual++;
+ return fn.apply(this, arguments);
+ };
+};
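+// Editor's note: a minimal sketch of the mustCall contract (demoMustCall is
+// illustrative and never invoked by the suite). The returned wrapper counts
+// its calls, and runCallChecks fails the process at exit when the count does
+// not match the expectation.
+function demoMustCall() {
+ var wrapped = exports.mustCall(function onDone() {}, 2);
+ wrapped(); // first expected call
+ wrapped(); // second expected call; any other total fails at exit
+}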
+
+exports.checkSpawnSyncRet = function(ret) {
+ assert.strictEqual(ret.status, 0);
+ assert.strictEqual(ret.error, undefined);
+};
+
+var etcServicesFileName = path.join('/etc', 'services');
+if (exports.isWindows) {
+ etcServicesFileName = path.join(process.env.SystemRoot, 'System32', 'drivers',
+ 'etc', 'services');
+}
+
+/*
+ * Returns a string that represents the service name associated
+ * to the service bound to port "port" and using protocol "protocol".
+ *
+ * If the service is not defined in the services file, it returns
+ * the port number as a string.
+ *
+ * Returns undefined if /etc/services (or its equivalent on non-UNIX
+ * platforms) can't be read.
+ */
+exports.getServiceName = function getServiceName(port, protocol) {
+ if (port == null) {
+ throw new Error('Missing port number');
+ }
+
+ if (typeof protocol !== 'string') {
+ throw new Error('Protocol must be a string');
+ }
+
+ /*
+ * By default, if a service can't be found in /etc/services,
+ * its name is considered to be its port number.
+ */
+ var serviceName = port.toString();
+
+ try {
+ /*
+ * I'm not a big fan of readFileSync, but reading /etc/services
+ * asynchronously here would require implementing a simple line parser,
+ * which seems overkill for a simple utility function that is not running
+ * concurrently with any other one.
+ */
+ var servicesContent = fs.readFileSync(etcServicesFileName,
+ { encoding: 'utf8'});
+ var regexp = util.format('^(\\w+)\\s+\\s%d/%s\\s', port, protocol);
+ var re = new RegExp(regexp, 'm');
+
+ var matches = re.exec(servicesContent);
+ if (matches && matches.length > 1) {
+ serviceName = matches[1];
+ }
+ } catch(e) {
+ console.error('Cannot read file: ', etcServicesFileName);
+ return undefined;
+ }
+
+ return serviceName;
+};
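+// Editor's note: illustrative usage only, assuming a standard services file:
+//   exports.getServiceName(80, 'tcp');    // likely 'http'
+//   exports.getServiceName(54321, 'tcp'); // '54321' when unregistered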
+
+exports.hasMultiLocalhost = function hasMultiLocalhost() {
+ var TCP = process.binding('tcp_wrap').TCP;
+ var t = new TCP();
+ var ret = t.bind('127.0.0.2', exports.PORT);
+ t.close();
+ return ret === 0;
+};
+
+exports.fileExists = function(pathname) {
+ try {
+ fs.accessSync(pathname);
+ return true;
+ } catch (err) {
+ return false;
+ }
+};
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
+
+if (!util._errnoException) {
+ var uv;
+ util._errnoException = function(err, syscall) {
+ if (util.isUndefined(uv)) try { uv = process.binding('uv'); } catch (e) {}
+ var errname = uv ? uv.errname(err) : '';
+ var e = new Error(syscall + ' ' + errname);
+ e.code = errname;
+ e.errno = errname;
+ e.syscall = syscall;
+ return e;
+ };
+}
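+// Editor's note: a hypothetical use of the shim above, building an
+// errno-style Error for a failed syscall:
+//   var err = util._errnoException(-2, 'open');
+//   // err.code / err.errno are 'ENOENT' where the uv binding is available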
diff --git a/test/fixtures/x1024.txt b/test/fixtures/x1024.txt
new file mode 100644
index 0000000..c6a9d2f
--- /dev/null
+++ b/test/fixtures/x1024.txt
@@ -0,0 +1 @@
+xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx [...]
\ No newline at end of file
diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js
new file mode 100644
index 0000000..b2ef864
--- /dev/null
+++ b/test/parallel/test-stream-big-packet.js
@@ -0,0 +1,60 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var util = require('util');
+var stream = require('../../');
+
+var passed = false;
+
+function PassThrough() {
+ stream.Transform.call(this);
+}
+util.inherits(PassThrough, stream.Transform);
+PassThrough.prototype._transform = function(chunk, encoding, done) {
+ this.push(chunk);
+ done();
+};
+
+function TestStream() {
+ stream.Transform.call(this);
+}
+util.inherits(TestStream, stream.Transform);
+TestStream.prototype._transform = function(chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = indexOf(chunk.toString(), 'a') >= 0;
+ }
+ done();
+};
+
+var s1 = new PassThrough();
+var s2 = new PassThrough();
+var s3 = new TestStream();
+s1.pipe(s3);
+// Don't let s2 auto-close, which may close s3
+s2.pipe(s3, {end: false});
+
+// We must write a buffer larger than highWaterMark
+var big = new Buffer(s1._writableState.highWaterMark + 1);
+big.fill('x');
+
+// Since big is larger than highWaterMark, it will be buffered internally.
+assert(!s1.write(big));
+// 'tiny' is small enough to pass through internal buffer.
+assert(s2.write('tiny'));
+
+// Write some small data in the next I/O loop; it will never be written to s3
+// because the 'drain' event is not emitted from s1 and s1 is still paused.
+setImmediate(s1.write.bind(s1), 'later');
+
+// Assert after two IO loops when all operations have been done.
+process.on('exit', function() {
+ assert(passed, 'Large buffer is not handled properly by Writable Stream');
+});
+
+function indexOf (xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
+}
diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js
new file mode 100644
index 0000000..c762fce
--- /dev/null
+++ b/test/parallel/test-stream-big-push.js
@@ -0,0 +1,64 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var stream = require('../../');
+var str = 'asdfasdfasdfasdfasdf';
+
+var r = new stream.Readable({
+ highWaterMark: 5,
+ encoding: 'utf8'
+});
+
+var reads = 0;
+var eofed = false;
+var ended = false;
+
+r._read = function(n) {
+ if (reads === 0) {
+ setTimeout(function() {
+ r.push(str);
+ });
+ reads++;
+ } else if (reads === 1) {
+ var ret = r.push(str);
+ assert.equal(ret, false);
+ reads++;
+ } else {
+ assert(!eofed);
+ eofed = true;
+ r.push(null);
+ }
+};
+
+r.on('end', function() {
+ ended = true;
+});
+
+// push some data in to start.
+// we've never gotten any read event at this point.
+var ret = r.push(str);
+// should be false. > hwm
+assert(!ret);
+var chunk = r.read();
+assert.equal(chunk, str);
+chunk = r.read();
+assert.equal(chunk, null);
+
+r.once('readable', function() {
+ // this time, we'll get *all* the remaining data, because
+ // it's been added synchronously, as the read WOULD take
+ // us below the hwm, and so it triggered a _read() again,
+ // which synchronously added more, which we then return.
+ chunk = r.read();
+ assert.equal(chunk, str + str);
+
+ chunk = r.read();
+ assert.equal(chunk, null);
+});
+
+process.on('exit', function() {
+ assert(eofed);
+ assert(ended);
+ assert.equal(reads, 2);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js
new file mode 100644
index 0000000..1abc835
--- /dev/null
+++ b/test/parallel/test-stream-duplex.js
@@ -0,0 +1,32 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Duplex = require('../../').Transform;
+
+var stream = new Duplex({ objectMode: true });
+
+assert(stream._readableState.objectMode);
+assert(stream._writableState.objectMode);
+
+var written;
+var read;
+
+stream._write = function(obj, _, cb) {
+ written = obj;
+ cb();
+};
+
+stream._read = function() {};
+
+stream.on('data', function(obj) {
+ read = obj;
+});
+
+stream.push({ val: 1 });
+stream.end({ val: 2 });
+
+process.on('exit', function() {
+ assert(read.val === 1);
+ assert(written.val === 2);
+});
diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js
new file mode 100644
index 0000000..cbd9857
--- /dev/null
+++ b/test/parallel/test-stream-end-paused.js
@@ -0,0 +1,33 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var gotEnd = false;
+
+// Make sure we don't miss the end event for paused 0-length streams
+
+var Readable = require('../../').Readable;
+var stream = new Readable();
+var calledRead = false;
+stream._read = function() {
+ assert(!calledRead);
+ calledRead = true;
+ this.push(null);
+};
+
+stream.on('data', function() {
+ throw new Error('should not ever get data');
+});
+stream.pause();
+
+setTimeout(function() {
+ stream.on('end', function() {
+ gotEnd = true;
+ });
+ stream.resume();
+});
+
+process.on('exit', function() {
+ assert(gotEnd);
+ assert(calledRead);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js
new file mode 100644
index 0000000..4a6c286
--- /dev/null
+++ b/test/parallel/test-stream-ispaused.js
@@ -0,0 +1,24 @@
+'use strict';
+var assert = require('assert');
+var common = require('../common');
+
+var stream = require('../../');
+
+var readable = new stream.Readable();
+
+// _read is a noop here.
+readable._read = Function();
+
+// default state of a stream is not "paused"
+assert.ok(!readable.isPaused());
+
+// make the stream start flowing...
+readable.on('data', Function());
+
+// still not paused.
+assert.ok(!readable.isPaused());
+
+readable.pause();
+assert.ok(readable.isPaused());
+readable.resume();
+assert.ok(!readable.isPaused());
diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js
new file mode 100644
index 0000000..be9361d
--- /dev/null
+++ b/test/parallel/test-stream-pipe-after-end.js
@@ -0,0 +1,66 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
+var util = require('util');
+
+util.inherits(TestReadable, Readable);
+function TestReadable(opt) {
+ if (!(this instanceof TestReadable))
+ return new TestReadable(opt);
+ Readable.call(this, opt);
+ this._ended = false;
+}
+
+TestReadable.prototype._read = function(n) {
+ if (this._ended)
+ this.emit('error', new Error('_read called twice'));
+ this._ended = true;
+ this.push(null);
+};
+
+util.inherits(TestWritable, Writable);
+function TestWritable(opt) {
+ if (!(this instanceof TestWritable))
+ return new TestWritable(opt);
+ Writable.call(this, opt);
+ this._written = [];
+}
+
+TestWritable.prototype._write = function(chunk, encoding, cb) {
+ this._written.push(chunk);
+ cb();
+};
+
+// this one should not emit 'end' until we read() from it later.
+var ender = new TestReadable();
+var enderEnded = false;
+
+// what happens when you pipe() a Readable that's already ended?
+var piper = new TestReadable();
+// pushes EOF null, and length=0, so this will trigger 'end'
+piper.read();
+
+setTimeout(function() {
+ ender.on('end', function() {
+ enderEnded = true;
+ });
+ assert(!enderEnded);
+ var c = ender.read();
+ assert.equal(c, null);
+
+ var w = new TestWritable();
+ var writableFinished = false;
+ w.on('finish', function() {
+ writableFinished = true;
+ });
+ piper.pipe(w);
+
+ process.on('exit', function() {
+ assert(enderEnded);
+ assert(writableFinished);
+ console.log('ok');
+ });
+});
diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js
new file mode 100644
index 0000000..fd5644e
--- /dev/null
+++ b/test/parallel/test-stream-pipe-cleanup.js
@@ -0,0 +1,105 @@
+'use strict';
+// This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
+
+var common = require('../common');
+var stream = require('../../');
+var assert = require('assert');
+var util = require('util');
+
+if (/^v0\.8\./.test(process.version))
+ return;
+
+function Writable() {
+ this.writable = true;
+ this.endCalls = 0;
+ require('stream').Stream.call(this);
+}
+util.inherits(Writable, require('stream').Stream);
+Writable.prototype.end = function() {
+ this.endCalls++;
+};
+
+Writable.prototype.destroy = function() {
+ this.endCalls++;
+};
+
+function Readable() {
+ this.readable = true;
+ require('stream').Stream.call(this);
+}
+util.inherits(Readable, require('stream').Stream);
+
+function Duplex() {
+ this.readable = true;
+ Writable.call(this);
+}
+util.inherits(Duplex, Writable);
+
+var i = 0;
+var limit = 100;
+
+var w = new Writable();
+
+var r;
+
+for (i = 0; i < limit; i++) {
+ r = new Readable();
+ r.pipe(w);
+ r.emit('end');
+}
+assert.equal(0, r.listeners('end').length);
+assert.equal(limit, w.endCalls);
+
+w.endCalls = 0;
+
+for (i = 0; i < limit; i++) {
+ r = new Readable();
+ r.pipe(w);
+ r.emit('close');
+}
+assert.equal(0, r.listeners('close').length);
+assert.equal(limit, w.endCalls);
+
+w.endCalls = 0;
+
+r = new Readable();
+
+for (i = 0; i < limit; i++) {
+ w = new Writable();
+ r.pipe(w);
+ w.emit('close');
+}
+assert.equal(0, w.listeners('close').length);
+
+r = new Readable();
+w = new Writable();
+var d = new Duplex();
+r.pipe(d); // pipeline A
+d.pipe(w); // pipeline B
+assert.equal(r.listeners('end').length, 2); // A.onend, A.cleanup
+assert.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup
+assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
+assert.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup
+assert.equal(w.listeners('end').length, 0);
+assert.equal(w.listeners('close').length, 1); // B.cleanup
+
+r.emit('end');
+assert.equal(d.endCalls, 1);
+assert.equal(w.endCalls, 0);
+assert.equal(r.listeners('end').length, 0);
+assert.equal(r.listeners('close').length, 0);
+assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
+assert.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup
+assert.equal(w.listeners('end').length, 0);
+assert.equal(w.listeners('close').length, 1); // B.cleanup
+
+d.emit('end');
+assert.equal(d.endCalls, 1);
+assert.equal(w.endCalls, 1);
+assert.equal(r.listeners('end').length, 0);
+assert.equal(r.listeners('close').length, 0);
+assert.equal(d.listeners('end').length, 0);
+assert.equal(d.listeners('close').length, 0);
+assert.equal(w.listeners('end').length, 0);
+assert.equal(w.listeners('close').length, 0);
diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js
new file mode 100644
index 0000000..316b15c
--- /dev/null
+++ b/test/parallel/test-stream-pipe-error-handling.js
@@ -0,0 +1,111 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var Stream = require('stream').Stream;
+
+(function testErrorListenerCatches() {
+ var source = new Stream();
+ var dest = new Stream();
+
+ source.pipe(dest);
+
+ var gotErr = null;
+ source.on('error', function(err) {
+ gotErr = err;
+ });
+
+ var err = new Error('This stream turned into bacon.');
+ source.emit('error', err);
+ assert.strictEqual(gotErr, err);
+})();
+
+(function testErrorWithoutListenerThrows() {
+ var source = new Stream();
+ var dest = new Stream();
+
+ source.pipe(dest);
+
+ var err = new Error('This stream turned into bacon.');
+
+ var gotErr = null;
+ try {
+ source.emit('error', err);
+ } catch (e) {
+ gotErr = e;
+ }
+
+ assert.strictEqual(gotErr, err);
+})();
+
+(function testErrorWithRemovedListenerThrows() {
+ var EE = require('events').EventEmitter;
+ var R = require('../../').Readable;
+ var W = require('../../').Writable;
+
+ var r = new R();
+ var w = new W();
+ var removed = false;
+ var didTest = false;
+
+ process.on('exit', function() {
+ assert(didTest);
+ console.log('ok');
+ });
+
+ r._read = function() {
+ setTimeout(function() {
+ assert(removed);
+ assert.throws(function() {
+ w.emit('error', new Error('fail'));
+ });
+ didTest = true;
+ });
+ };
+
+ w.on('error', myOnError);
+ r.pipe(w);
+ w.removeListener('error', myOnError);
+ removed = true;
+
+ function myOnError(er) {
+ throw new Error('this should not happen');
+ }
+})();
+
+(function testErrorWithRemovedOtherListenerIsCaught() {
+ var EE = require('events').EventEmitter;
+ var R = require('../../').Readable;
+ var W = require('../../').Writable;
+
+ var r = new R();
+ var w = new W();
+ var removed = false;
+ var didTest = false;
+ var caught = false;
+
+ process.on('exit', function() {
+ assert(didTest);
+ console.log('ok');
+ });
+
+ r._read = function() {
+ setTimeout(function() {
+ assert(removed);
+ w.emit('error', new Error('fail'));
+ didTest = true;
+ });
+ };
+
+ w.on('error', myOnError);
+ w._write = function() {};
+
+ r.pipe(w);
+ // Removing some OTHER random listener should not do anything
+ w.removeListener('error', function() {});
+ removed = true;
+
+ function myOnError(er) {
+ assert(!caught);
+ caught = true;
+ }
+})();
diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js
new file mode 100644
index 0000000..1a52c63
--- /dev/null
+++ b/test/parallel/test-stream-pipe-event.js
@@ -0,0 +1,29 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+var assert = require('assert');
+var util = require('util');
+
+function Writable() {
+ this.writable = true;
+ require('stream').Stream.call(this);
+}
+util.inherits(Writable, require('stream').Stream);
+
+function Readable() {
+ this.readable = true;
+ require('stream').Stream.call(this);
+}
+util.inherits(Readable, require('stream').Stream);
+
+var passed = false;
+
+var w = new Writable();
+w.on('pipe', function(src) {
+ passed = true;
+});
+
+var r = new Readable();
+r.pipe(w);
+
+assert.ok(passed);
diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js
new file mode 100644
index 0000000..27bfe38
--- /dev/null
+++ b/test/parallel/test-stream-pipe-without-listenerCount.js
@@ -0,0 +1,20 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+
+var r = new stream({ read: noop });
+r.listenerCount = undefined;
+
+var w = new stream();
+w.listenerCount = undefined;
+
+w.on('pipe', function() {
+ r.emit('error', new Error('Readable Error'));
+ w.emit('error', new Error('Writable Error'));
+});
+r.on('error', common.mustCall(noop));
+w.on('error', common.mustCall(noop));
+r.pipe(w);
+
+function noop() {}
diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js
new file mode 100644
index 0000000..b9e8074
--- /dev/null
+++ b/test/parallel/test-stream-push-order.js
@@ -0,0 +1,32 @@
+'use strict';
+var common = require('../common');
+var Readable = require('../../').Readable;
+var assert = require('assert');
+
+var s = new Readable({
+ highWaterMark: 20,
+ encoding: 'ascii'
+});
+
+var list = ['1', '2', '3', '4', '5', '6'];
+
+s._read = function(n) {
+ var one = list.shift();
+ if (!one) {
+ s.push(null);
+ } else {
+ var two = list.shift();
+ s.push(one);
+ s.push(two);
+ }
+};
+
+var v = s.read(0);
+
+// ACTUALLY [1, 3, 5, 6, 4, 2]
+
+process.on('exit', function() {
+ assert.deepEqual(s._readableState.buffer,
+ ['1', '2', '3', '4', '5', '6']);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js
new file mode 100644
index 0000000..8244a23
--- /dev/null
+++ b/test/parallel/test-stream-push-strings.js
@@ -0,0 +1,46 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../').Readable;
+var util = require('util');
+
+util.inherits(MyStream, Readable);
+function MyStream(options) {
+ Readable.call(this, options);
+ this._chunks = 3;
+}
+
+MyStream.prototype._read = function(n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null);
+ case 1:
+ return setTimeout(function() {
+ this.push('last chunk');
+ }.bind(this), 100);
+ case 2:
+ return this.push('second to last chunk');
+ case 3:
+ return process.nextTick(function() {
+ this.push('first chunk');
+ }.bind(this));
+ default:
+ throw new Error('?');
+ }
+};
+
+var ms = new MyStream();
+var results = [];
+ms.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = ms.read()))
+ results.push(chunk + '');
+});
+
+var expect = [ 'first chunksecond to last chunk', 'last chunk' ];
+process.on('exit', function() {
+ assert.equal(ms._chunks, -1);
+ assert.deepEqual(results, expect);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js
new file mode 100644
index 0000000..9dead92
--- /dev/null
+++ b/test/parallel/test-stream-readable-constructor-set-methods.js
@@ -0,0 +1,19 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../').Readable;
+
+var _readCalled = false;
+function _read(n) {
+ _readCalled = true;
+ this.push(null);
+}
+
+var r = new Readable({ read: _read });
+r.resume();
+
+process.on('exit', function() {
+ assert.equal(r._read, _read);
+ assert(_readCalled);
+});
diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js
new file mode 100644
index 0000000..ad45939
--- /dev/null
+++ b/test/parallel/test-stream-readable-event.js
@@ -0,0 +1,106 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../').Readable;
+
+(function first() {
+ // First test: not reading when the 'readable' listener is added.
+ // Make sure that on('readable', ...) triggers a 'readable' event.
+ var r = new Readable({
+ highWaterMark: 3
+ });
+
+ var _readCalled = false;
+ r._read = function(n) {
+ _readCalled = true;
+ };
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(new Buffer('blerg'));
+
+ var caughtReadable = false;
+ setTimeout(function() {
+ // we're testing what we think we are
+ assert(!r._readableState.reading);
+ r.on('readable', function() {
+ caughtReadable = true;
+ });
+ });
+
+ process.on('exit', function() {
+ // we're testing what we think we are
+ assert(!_readCalled);
+
+ assert(caughtReadable);
+ console.log('ok 1');
+ });
+})();
+
+(function second() {
+ // Second test: make sure that 'readable' is re-emitted if there's
+ // already a length, while it IS reading.
+
+ var r = new Readable({
+ highWaterMark: 3
+ });
+
+ var _readCalled = false;
+ r._read = function(n) {
+ _readCalled = true;
+ };
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(new Buffer('bl'));
+
+ var caughtReadable = false;
+ setTimeout(function() {
+ // assert we're testing what we think we are
+ assert(r._readableState.reading);
+ r.on('readable', function() {
+ caughtReadable = true;
+ });
+ });
+
+ process.on('exit', function() {
+ // we're testing what we think we are
+ assert(_readCalled);
+
+ assert(caughtReadable);
+ console.log('ok 2');
+ });
+})();
+
+(function third() {
+ // Third test, not reading when the stream has not passed
+ // the highWaterMark but *has* reached EOF.
+ var r = new Readable({
+ highWaterMark: 30
+ });
+
+ var _readCalled = false;
+ r._read = function(n) {
+ _readCalled = true;
+ };
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(new Buffer('blerg'));
+ r.push(null);
+
+ var caughtReadable = false;
+ setTimeout(function() {
+ // assert we're testing what we think we are
+ assert(!r._readableState.reading);
+ r.on('readable', function() {
+ caughtReadable = true;
+ });
+ });
+
+ process.on('exit', function() {
+ // we're testing what we think we are
+ assert(!_readCalled);
+
+ assert(caughtReadable);
+ console.log('ok 3');
+ });
+})();
diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js
new file mode 100644
index 0000000..abf6795
--- /dev/null
+++ b/test/parallel/test-stream-readable-flow-recursion.js
@@ -0,0 +1,56 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+// this test verifies that passing a huge number to read(size)
+// will push up the highWaterMark, and cause the stream to read
+// more data continuously, but without triggering a nextTick
+// warning or RangeError.
+
+var Readable = require('../../').Readable;
+
+// throw an error if we trigger a nextTick warning.
+process.throwDeprecation = true;
+
+var stream = new Readable({ highWaterMark: 2 });
+var reads = 0;
+var total = 5000;
+stream._read = function(size) {
+ reads++;
+ size = Math.min(size, total);
+ total -= size;
+ if (size === 0)
+ stream.push(null);
+ else
+ stream.push(new Buffer(size));
+};
+
+var depth = 0;
+
+function flow(stream, size, callback) {
+ depth += 1;
+ var chunk = stream.read(size);
+
+ if (!chunk)
+ stream.once('readable', flow.bind(null, stream, size, callback));
+ else
+ callback(chunk);
+
+ depth -= 1;
+ console.log('flow(' + depth + '): exit');
+}
+
+flow(stream, 5000, function() {
+ console.log('complete (' + depth + ')');
+});
+
+process.on('exit', function(code) {
+ assert.equal(reads, 2);
+ // we pushed up the high water mark
+ assert.equal(stream._readableState.highWaterMark, 8192);
+ // length is 0 right now, because we pulled it all out.
+ assert.equal(stream._readableState.length, 0);
+ assert(!code);
+ assert.equal(depth, 0);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js
new file mode 100644
index 0000000..876fd5a
--- /dev/null
+++ b/test/parallel/test-stream-transform-constructor-set-methods.js
@@ -0,0 +1,32 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Transform = require('../../').Transform;
+
+var _transformCalled = false;
+function _transform(d, e, n) {
+ _transformCalled = true;
+ n();
+}
+
+var _flushCalled = false;
+function _flush(n) {
+ _flushCalled = true;
+ n();
+}
+
+var t = new Transform({
+ transform: _transform,
+ flush: _flush
+});
+
+t.end(new Buffer('blerg'));
+t.resume();
+
+process.on('exit', function() {
+ assert.equal(t._transform, _transform);
+ assert.equal(t._flush, _flush);
+ assert(_transformCalled);
+ assert(_flushCalled);
+});
diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js
new file mode 100644
index 0000000..e29a072
--- /dev/null
+++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js
@@ -0,0 +1,33 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var stream = require('../../');
+var PassThrough = stream.PassThrough;
+
+var src = new PassThrough({ objectMode: true });
+var tx = new PassThrough({ objectMode: true });
+var dest = new PassThrough({ objectMode: true });
+
+var expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ];
+var results = [];
+process.on('exit', function() {
+ assert.deepEqual(results, expect);
+ console.log('ok');
+});
+
+dest.on('data', function(x) {
+ results.push(x);
+});
+
+src.pipe(tx).pipe(dest);
+
+var i = -1;
+var int = setInterval(function() {
+ if (i > 10) {
+ src.end();
+ clearInterval(int);
+ } else {
+ src.write(i++);
+ }
+});
diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js
new file mode 100644
index 0000000..63df697
--- /dev/null
+++ b/test/parallel/test-stream-transform-split-objectmode.js
@@ -0,0 +1,52 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Transform = require('../../').Transform;
+
+var parser = new Transform({ readableObjectMode : true });
+
+assert(parser._readableState.objectMode);
+assert(!parser._writableState.objectMode);
+assert(parser._readableState.highWaterMark === 16);
+assert(parser._writableState.highWaterMark === (16 * 1024));
+
+parser._transform = function(chunk, enc, callback) {
+ callback(null, { val : chunk[0] });
+};
+
+var parsed;
+
+parser.on('data', function(obj) {
+ parsed = obj;
+});
+
+parser.end(new Buffer([42]));
+
+process.on('exit', function() {
+ assert(parsed.val === 42);
+});
+
+
+var serializer = new Transform({ writableObjectMode : true });
+
+assert(!serializer._readableState.objectMode);
+assert(serializer._writableState.objectMode);
+assert(serializer._readableState.highWaterMark === (16 * 1024));
+assert(serializer._writableState.highWaterMark === 16);
+
+serializer._transform = function(obj, _, callback) {
+ callback(null, new Buffer([obj.val]));
+};
+
+var serialized;
+
+serializer.on('data', function(chunk) {
+ serialized = chunk;
+});
+
+serializer.write({ val : 42 });
+
+process.on('exit', function() {
+ assert(serialized[0] === 42);
+});
diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js
new file mode 100644
index 0000000..19b5b12
--- /dev/null
+++ b/test/parallel/test-stream-unshift-empty-chunk.js
@@ -0,0 +1,61 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+// This test verifies that stream.unshift(Buffer(0)) or
+// stream.unshift('') does not set state.reading=false.
+var Readable = require('../../').Readable;
+
+var r = new Readable();
+var nChunks = 10;
+var chunk = new Buffer(10);
+chunk.fill('x');
+
+r._read = function(n) {
+ setTimeout(function() {
+ r.push(--nChunks === 0 ? null : chunk);
+ });
+};
+
+var readAll = false;
+var seen = [];
+r.on('readable', function() {
+ var chunk;
+ while (chunk = r.read()) {
+ seen.push(chunk.toString());
+ // simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+ var putBack = new Buffer(readAll ? 0 : 5);
+ putBack.fill('y');
+ readAll = !readAll;
+ r.unshift(putBack);
+ }
+});
+
+var expect =
+ [ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy' ];
+
+r.on('end', function() {
+ assert.deepEqual(seen, expect);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js
new file mode 100644
index 0000000..2667235
--- /dev/null
+++ b/test/parallel/test-stream-unshift-read-race.js
@@ -0,0 +1,113 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+// This test verifies that:
+// 1. unshift() does not cause colliding _read() calls.
+// 2. unshift() after the 'end' event is an error, but after the EOF
+// signalling null, it is ok, and just creates a new readable chunk.
+// 3. push() after the EOF signaling null is an error.
+// 4. _read() is not called after pushing the EOF null chunk.
+
+var stream = require('../../');
+var hwm = 10;
+var r = stream.Readable({ highWaterMark: hwm });
+var chunks = 10;
+var t = (chunks * 5);
+
+var data = new Buffer(chunks * hwm + Math.ceil(hwm / 2));
+for (var i = 0; i < data.length; i++) {
+ var c = 'asdf'.charCodeAt(i % 4);
+ data[i] = c;
+}
+
+var pos = 0;
+var pushedNull = false;
+r._read = function(n) {
+ assert(!pushedNull, '_read after null push');
+
+ // every third chunk is fast
+ push(!(chunks % 3));
+
+ function push(fast) {
+ assert(!pushedNull, 'push() after null push');
+ var c = pos >= data.length ? null : data.slice(pos, Math.min(pos + n, data.length));
+ pushedNull = c === null;
+ if (fast) {
+ pos += n;
+ r.push(c);
+ if (c === null) pushError();
+ } else {
+ setTimeout(function() {
+ pos += n;
+ r.push(c);
+ if (c === null) pushError();
+ });
+ }
+ }
+};
+
+function pushError() {
+ assert.throws(function() {
+ r.push(new Buffer(1));
+ });
+}
+
+
+var w = stream.Writable();
+var written = [];
+w._write = function(chunk, encoding, cb) {
+ written.push(chunk.toString());
+ cb();
+};
+
+var ended = false;
+r.on('end', function() {
+ assert(!ended, 'end emitted more than once');
+ assert.throws(function() {
+ r.unshift(new Buffer(1));
+ });
+ ended = true;
+ w.end();
+});
+
+r.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = r.read(10))) {
+ w.write(chunk);
+ if (chunk.length > 4)
+ r.unshift(new Buffer('1234'));
+ }
+});
+
+var finished = false;
+w.on('finish', function() {
+ finished = true;
+ // each chunk should start with 1234, and then be asdfasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ assert.equal(written[0], 'asdfasdfas');
+ var asdf = 'd';
+ console.error('0: %s', written[0]);
+ for (var i = 1; i < written.length; i++) {
+ console.error('%s: %s', i.toString(32), written[i]);
+ assert.equal(written[i].slice(0, 4), '1234');
+ for (var j = 4; j < written[i].length; j++) {
+ var c = written[i].charAt(j);
+ assert.equal(c, asdf);
+ switch (asdf) {
+ case 'a': asdf = 's'; break;
+ case 's': asdf = 'd'; break;
+ case 'd': asdf = 'f'; break;
+ case 'f': asdf = 'a'; break;
+ }
+ }
+ }
+});
+
+process.on('exit', function() {
+ assert.equal(written.length, 18);
+ assert(ended, 'stream ended');
+ assert(finished, 'stream finished');
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js
new file mode 100644
index 0000000..5d80abc
--- /dev/null
+++ b/test/parallel/test-stream-writable-change-default-encoding.js
@@ -0,0 +1,52 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var stream = require('../../');
+var util = require('util');
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options);
+ this.fn = fn;
+}
+
+util.inherits(MyWritable, stream.Writable);
+
+MyWritable.prototype._write = function(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
+ callback();
+};
+
+(function defaultEncodingIsUtf8() {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ assert.equal(enc, 'utf8');
+ }, { decodeStrings: false });
+ m.write('foo');
+ m.end();
+}());
+
+(function changeDefaultEncodingToAscii() {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ assert.equal(enc, 'ascii');
+ }, { decodeStrings: false });
+ m.setDefaultEncoding('ascii');
+ m.write('bar');
+ m.end();
+}());
+
+assert.throws(function changeDefaultEncodingToInvalidValue() {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ }, { decodeStrings: false });
+ m.setDefaultEncoding({});
+ m.write('bar');
+ m.end();
+}, TypeError);
+
+(function checkVariableCaseEncoding() {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ assert.equal(enc, 'ascii');
+ }, { decodeStrings: false });
+ m.setDefaultEncoding('AsCii');
+ m.write('bar');
+ m.end();
+}());
diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js
new file mode 100644
index 0000000..ea35dfc
--- /dev/null
+++ b/test/parallel/test-stream-writable-constructor-set-methods.js
@@ -0,0 +1,35 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Writable = require('../../').Writable;
+
+var _writeCalled = false;
+function _write(d, e, n) {
+ _writeCalled = true;
+}
+
+var w = new Writable({ write: _write });
+w.end(new Buffer('blerg'));
+
+var _writevCalled = false;
+var dLength = 0;
+function _writev(d, n) {
+ dLength = d.length;
+ _writevCalled = true;
+}
+
+var w2 = new Writable({ writev: _writev });
+w2.cork();
+
+w2.write(new Buffer('blerg'));
+w2.write(new Buffer('blerg'));
+w2.end();
+
+process.on('exit', function() {
+ assert.equal(w._write, _write);
+ assert(_writeCalled);
+ assert.equal(w2._writev, _writev);
+ assert.equal(dLength, 2);
+ assert(_writevCalled);
+});
diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js
new file mode 100644
index 0000000..06d11e2
--- /dev/null
+++ b/test/parallel/test-stream-writable-decoded-encoding.js
@@ -0,0 +1,40 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var stream = require('../../');
+var util = require('util');
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options);
+ this.fn = fn;
+}
+
+util.inherits(MyWritable, stream.Writable);
+
+MyWritable.prototype._write = function(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
+ callback();
+};
+
+;(function decodeStringsTrue() {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ assert(isBuffer);
+ assert.equal(type, 'object');
+ assert.equal(enc, 'buffer');
+ console.log('ok - decoded string is decoded');
+ }, { decodeStrings: true });
+ m.write('some-text', 'utf8');
+ m.end();
+})();
+
+;(function decodeStringsFalse() {
+ var m = new MyWritable(function(isBuffer, type, enc) {
+ assert(!isBuffer);
+ assert.equal(type, 'string');
+ assert.equal(enc, 'utf8');
+ console.log('ok - un-decoded string is not decoded');
+ }, { decodeStrings: false });
+ m.write('some-text', 'utf8');
+ m.end();
+})();
diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js
new file mode 100644
index 0000000..5dea49b
--- /dev/null
+++ b/test/parallel/test-stream-writev.js
@@ -0,0 +1,106 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var stream = require('../../');
+
+var queue = [];
+for (var decode = 0; decode < 2; decode++) {
+ for (var uncork = 0; uncork < 2; uncork++) {
+ for (var multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi]);
+ }
+ }
+}
+
+run();
+
+function run() {
+ var t = queue.pop();
+ if (t)
+ test(t[0], t[1], t[2], run);
+ else
+ console.log('ok');
+}
+
+function test(decode, uncork, multi, next) {
+ console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
+ var counter = 0;
+ var expectCount = 0;
+ function cnt(msg) {
+ expectCount++;
+ var expect = expectCount;
+ var called = false;
+ return function(er) {
+ if (er)
+ throw er;
+ called = true;
+ counter++;
+ assert.equal(counter, expect);
+ };
+ }
+
+ var w = new stream.Writable({ decodeStrings: decode });
+ w._write = function(chunk, e, cb) {
+ assert(false, 'Should not call _write');
+ };
+
+ var expectChunks = decode ?
+ [
+ { encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32] },
+ { encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100] },
+ { encoding: 'buffer',
+ chunk: [33] },
+ { encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] },
+ { encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]}
+ ] : [
+ { encoding: 'ascii', chunk: 'hello, ' },
+ { encoding: 'utf8', chunk: 'world' },
+ { encoding: 'buffer', chunk: [33] },
+ { encoding: 'binary', chunk: '\nand then...' },
+ { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }
+ ];
+
+ var actualChunks;
+ w._writev = function(chunks, cb) {
+ actualChunks = chunks.map(function(chunk) {
+ return {
+ encoding: chunk.encoding,
+ chunk: Buffer.isBuffer(chunk.chunk) ?
+ Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ };
+ });
+ cb();
+ };
+
+ w.cork();
+ w.write('hello, ', 'ascii', cnt('hello'));
+ w.write('world', 'utf8', cnt('world'));
+
+ if (multi)
+ w.cork();
+
+ w.write(new Buffer('!'), 'buffer', cnt('!'));
+ w.write('\nand then...', 'binary', cnt('and then'));
+
+ if (multi)
+ w.uncork();
+
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'));
+
+ if (uncork)
+ w.uncork();
+
+ w.end(cnt('end'));
+
+ w.on('finish', function() {
+ // make sure 'finish' comes after all the write callbacks
+ cnt('finish')();
+ assert.deepEqual(expectChunks, actualChunks);
+ next();
+ });
+}
diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js
new file mode 100644
index 0000000..fc7063b
--- /dev/null
+++ b/test/parallel/test-stream2-base64-single-char-read-end.js
@@ -0,0 +1,37 @@
+'use strict';
+var common = require('../common');
+var R = require('../../lib/_stream_readable');
+var W = require('../../lib/_stream_writable');
+var assert = require('assert');
+
+var src = new R({encoding: 'base64'});
+var dst = new W();
+var hasRead = false;
+var accum = [];
+var timeout;
+
+src._read = function(n) {
+ if (!hasRead) {
+ hasRead = true;
+ process.nextTick(function() {
+ src.push(new Buffer('1'));
+ src.push(null);
+ });
+ }
+};
+
+dst._write = function(chunk, enc, cb) {
+ accum.push(chunk);
+ cb();
+};
+
+src.on('end', function() {
+ assert.equal(Buffer.concat(accum) + '', 'MQ==');
+ clearTimeout(timeout);
+});
+
+src.pipe(dst);
+
+timeout = setTimeout(function() {
+ assert.fail('timed out waiting for _write');
+}, 100);
diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js
new file mode 100644
index 0000000..e0a3a91
--- /dev/null
+++ b/test/parallel/test-stream2-compatibility.js
@@ -0,0 +1,32 @@
+'use strict';
+var common = require('../common');
+var R = require('../../lib/_stream_readable');
+var assert = require('assert');
+
+var util = require('util');
+var EE = require('events').EventEmitter;
+
+var ondataCalled = 0;
+
+function TestReader() {
+ R.apply(this);
+ this._buffer = new Buffer(100);
+ this._buffer.fill('x');
+
+ this.on('data', function() {
+ ondataCalled++;
+ });
+}
+
+util.inherits(TestReader, R);
+
+TestReader.prototype._read = function(n) {
+ this.push(this._buffer);
+ this._buffer = new Buffer(0);
+};
+
+var reader = new TestReader();
+setImmediate(function() {
+ assert.equal(ondataCalled, 1);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js
new file mode 100644
index 0000000..bfdd212
--- /dev/null
+++ b/test/parallel/test-stream2-finish-pipe.js
@@ -0,0 +1,21 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+var Buffer = require('buffer').Buffer;
+
+var r = new stream.Readable();
+r._read = function(size) {
+ r.push(new Buffer(size));
+};
+
+var w = new stream.Writable();
+w._write = function(data, encoding, cb) {
+ cb(null);
+};
+
+r.pipe(w);
+
+// This might sound unrealistic, but it happens in net.js. When
+// `socket.allowHalfOpen === false`, EOF will cause `.destroySoon()` call which
+// ends the writable side of net.Socket.
+w.end();
diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js
new file mode 100644
index 0000000..334eff8
--- /dev/null
+++ b/test/parallel/test-stream2-large-read-stall.js
@@ -0,0 +1,62 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+// If everything aligns so that you do a read(n) of exactly the
+// remaining buffer, then make sure that 'end' still emits.
+
+var READSIZE = 100;
+var PUSHSIZE = 20;
+var PUSHCOUNT = 1000;
+var HWM = 50;
+
+var Readable = require('../../').Readable;
+var r = new Readable({
+ highWaterMark: HWM
+});
+var rs = r._readableState;
+
+r._read = push;
+
+r.on('readable', function() {
+ ;false && console.error('>> readable');
+ do {
+ ;false && console.error(' > read(%d)', READSIZE);
+ var ret = r.read(READSIZE);
+ ;false && console.error(' < %j (%d remain)', ret && ret.length, rs.length);
+ } while (ret && ret.length === READSIZE);
+
+ ;false && console.error('<< after read()',
+ ret && ret.length,
+ rs.needReadable,
+ rs.length);
+});
+
+var endEmitted = false;
+r.on('end', function() {
+ endEmitted = true;
+ ;false && console.error('end');
+});
+
+var pushes = 0;
+function push() {
+ if (pushes > PUSHCOUNT)
+ return;
+
+ if (pushes++ === PUSHCOUNT) {
+ ;false && console.error(' push(EOF)');
+ return r.push(null);
+ }
+
+ ;false && console.error(' push #%d', pushes);
+ if (r.push(new Buffer(PUSHSIZE)))
+ setTimeout(push);
+}
+
+// start the flow
+var ret = r.read(0);
+
+process.on('exit', function() {
+ assert.equal(pushes, PUSHCOUNT + 1);
+ assert(endEmitted);
+});
diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js
new file mode 100644
index 0000000..1c3e66c
--- /dev/null
+++ b/test/parallel/test-stream2-objects.js
@@ -0,0 +1,336 @@
+'use strict';
+var common = require('../common');
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
+var assert = require('assert');
+
+// tiny node-tap lookalike.
+var tests = [];
+var count = 0;
+
+function test(name, fn) {
+ count++;
+ tests.push([name, fn]);
+}
+
+function run() {
+ var next = tests.shift();
+ if (!next)
+ return console.error('ok');
+
+ var name = next[0];
+ var fn = next[1];
+ console.log('# %s', name);
+ fn({
+ same: assert.deepEqual,
+ equal: assert.equal,
+ end: function() {
+ count--;
+ run();
+ }
+ });
+}
+
+// ensure all tests have run
+process.on('exit', function() {
+ assert.equal(count, 0);
+});
+
+process.nextTick(run);
+
+function toArray(callback) {
+ var stream = new Writable({ objectMode: true });
+ var list = [];
+ stream.write = function(chunk) {
+ list.push(chunk);
+ };
+
+ stream.end = function() {
+ callback(list);
+ };
+
+ return stream;
+}
+
+function fromArray(list) {
+ var r = new Readable({ objectMode: true });
+ r._read = noop;
+ forEach(list, function(chunk) {
+ r.push(chunk);
+ });
+ r.push(null);
+
+ return r;
+}
+
+function noop() {}
+
+test('can read objects from stream', function(t) {
+ var r = fromArray([{ one: '1'}, { two: '2' }]);
+
+ var v1 = r.read();
+ var v2 = r.read();
+ var v3 = r.read();
+
+ assert.deepEqual(v1, { one: '1' });
+ assert.deepEqual(v2, { two: '2' });
+ assert.deepEqual(v3, null);
+
+ t.end();
+});
+
+test('can pipe objects into stream', function(t) {
+ var r = fromArray([{ one: '1'}, { two: '2' }]);
+
+ r.pipe(toArray(function(list) {
+ assert.deepEqual(list, [
+ { one: '1' },
+ { two: '2' }
+ ]);
+
+ t.end();
+ }));
+});
+
+test('read(n) is ignored', function(t) {
+ var r = fromArray([{ one: '1'}, { two: '2' }]);
+
+ var value = r.read(2);
+
+ assert.deepEqual(value, { one: '1' });
+
+ t.end();
+});
+
+test('can read objects from _read (sync)', function(t) {
+ var r = new Readable({ objectMode: true });
+ var list = [{ one: '1'}, { two: '2' }];
+ r._read = function(n) {
+ var item = list.shift();
+ r.push(item || null);
+ };
+
+ r.pipe(toArray(function(list) {
+ assert.deepEqual(list, [
+ { one: '1' },
+ { two: '2' }
+ ]);
+
+ t.end();
+ }));
+});
+
+test('can read objects from _read (async)', function(t) {
+ var r = new Readable({ objectMode: true });
+ var list = [{ one: '1'}, { two: '2' }];
+ r._read = function(n) {
+ var item = list.shift();
+ process.nextTick(function() {
+ r.push(item || null);
+ });
+ };
+
+ r.pipe(toArray(function(list) {
+ assert.deepEqual(list, [
+ { one: '1' },
+ { two: '2' }
+ ]);
+
+ t.end();
+ }));
+});
+
+test('can read strings as objects', function(t) {
+ var r = new Readable({
+ objectMode: true
+ });
+ r._read = noop;
+ var list = ['one', 'two', 'three'];
+ forEach(list, function(str) {
+ r.push(str);
+ });
+ r.push(null);
+
+ r.pipe(toArray(function(array) {
+ assert.deepEqual(array, list);
+
+ t.end();
+ }));
+});
+
+test('read(0) for object streams', function(t) {
+ var r = new Readable({
+ objectMode: true
+ });
+ r._read = noop;
+
+ r.push('foobar');
+ r.push(null);
+
+ var v = r.read(0);
+
+ r.pipe(toArray(function(array) {
+ assert.deepEqual(array, ['foobar']);
+
+ t.end();
+ }));
+});
+
+test('falsey values', function(t) {
+ var r = new Readable({
+ objectMode: true
+ });
+ r._read = noop;
+
+ r.push(false);
+ r.push(0);
+ r.push('');
+ r.push(null);
+
+ r.pipe(toArray(function(array) {
+ assert.deepEqual(array, [false, 0, '']);
+
+ t.end();
+ }));
+});
+
+test('high watermark _read', function(t) {
+ var r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ });
+ var calls = 0;
+ var list = ['1', '2', '3', '4', '5', '6', '7', '8'];
+
+ r._read = function(n) {
+ calls++;
+ };
+
+ forEach(list, function(c) {
+ r.push(c);
+ });
+
+ var v = r.read();
+
+ assert.equal(calls, 0);
+ assert.equal(v, '1');
+
+ var v2 = r.read();
+ assert.equal(v2, '2');
+
+ var v3 = r.read();
+ assert.equal(v3, '3');
+
+ assert.equal(calls, 1);
+
+ t.end();
+});
+
+test('high watermark push', function(t) {
+ var r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ });
+ r._read = function(n) {};
+ for (var i = 0; i < 6; i++) {
+ var bool = r.push(i);
+ assert.equal(bool, i === 5 ? false : true);
+ }
+
+ t.end();
+});
+
+test('can write objects to stream', function(t) {
+ var w = new Writable({ objectMode: true });
+
+ w._write = function(chunk, encoding, cb) {
+ assert.deepEqual(chunk, { foo: 'bar' });
+ cb();
+ };
+
+ w.on('finish', function() {
+ t.end();
+ });
+
+ w.write({ foo: 'bar' });
+ w.end();
+});
+
+test('can write multiple objects to stream', function(t) {
+ var w = new Writable({ objectMode: true });
+ var list = [];
+
+ w._write = function(chunk, encoding, cb) {
+ list.push(chunk);
+ cb();
+ };
+
+ w.on('finish', function() {
+ assert.deepEqual(list, [0, 1, 2, 3, 4]);
+
+ t.end();
+ });
+
+ w.write(0);
+ w.write(1);
+ w.write(2);
+ w.write(3);
+ w.write(4);
+ w.end();
+});
+
+test('can write strings as objects', function(t) {
+ var w = new Writable({
+ objectMode: true
+ });
+ var list = [];
+
+ w._write = function(chunk, encoding, cb) {
+ list.push(chunk);
+ process.nextTick(cb);
+ };
+
+ w.on('finish', function() {
+ assert.deepEqual(list, ['0', '1', '2', '3', '4']);
+
+ t.end();
+ });
+
+ w.write('0');
+ w.write('1');
+ w.write('2');
+ w.write('3');
+ w.write('4');
+ w.end();
+});
+
+test('buffers finish until cb is called', function(t) {
+ var w = new Writable({
+ objectMode: true
+ });
+ var called = false;
+
+ w._write = function(chunk, encoding, cb) {
+ assert.equal(chunk, 'foo');
+
+ process.nextTick(function() {
+ called = true;
+ cb();
+ });
+ };
+
+ w.on('finish', function() {
+ assert.equal(called, true);
+
+ t.end();
+ });
+
+ w.write('foo');
+ w.end();
+});
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
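
A compact restatement of the objectMode rules these cases pin down --
length counts objects rather than bytes, the argument to read(n) is
ignored, and every value except null is a legal chunk (null keeps its
end-of-stream meaning):

    'use strict';
    var Readable = require('stream').Readable;

    var r = new Readable({ objectMode: true });
    r._read = function() {};

    r.push({ id: 1 });
    r.push(0);    // falsey, but still a real chunk
    r.push(null); // null remains the EOF marker

    console.log(r.read(100)); // { id: 1 } -- the 100 is ignored
    console.log(r.read());    // 0
    console.log(r.read());    // null: buffer drained
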
diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js
new file mode 100644
index 0000000..98d452c
--- /dev/null
+++ b/test/parallel/test-stream2-pipe-error-handling.js
@@ -0,0 +1,85 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var stream = require('../../');
+
+(function testErrorListenerCatches() {
+ var count = 1000;
+
+ var source = new stream.Readable();
+ source._read = function(n) {
+ n = Math.min(count, n);
+ count -= n;
+ source.push(new Buffer(n));
+ };
+
+ var unpipedDest;
+ source.unpipe = function(dest) {
+ unpipedDest = dest;
+ stream.Readable.prototype.unpipe.call(this, dest);
+ };
+
+ var dest = new stream.Writable();
+ dest._write = function(chunk, encoding, cb) {
+ cb();
+ };
+
+ source.pipe(dest);
+
+ var gotErr = null;
+ dest.on('error', function(err) {
+ gotErr = err;
+ });
+
+ var unpipedSource;
+ dest.on('unpipe', function(src) {
+ unpipedSource = src;
+ });
+
+ var err = new Error('This stream turned into bacon.');
+ dest.emit('error', err);
+ assert.strictEqual(gotErr, err);
+ assert.strictEqual(unpipedSource, source);
+ assert.strictEqual(unpipedDest, dest);
+})();
+
+(function testErrorWithoutListenerThrows() {
+ var count = 1000;
+
+ var source = new stream.Readable();
+ source._read = function(n) {
+ n = Math.min(count, n);
+ count -= n;
+ source.push(new Buffer(n));
+ };
+
+ var unpipedDest;
+ source.unpipe = function(dest) {
+ unpipedDest = dest;
+ stream.Readable.prototype.unpipe.call(this, dest);
+ };
+
+ var dest = new stream.Writable();
+ dest._write = function(chunk, encoding, cb) {
+ cb();
+ };
+
+ source.pipe(dest);
+
+ var unpipedSource;
+ dest.on('unpipe', function(src) {
+ unpipedSource = src;
+ });
+
+ var err = new Error('This stream turned into bacon.');
+
+ var gotErr = null;
+ try {
+ dest.emit('error', err);
+ } catch (e) {
+ gotErr = e;
+ }
+ assert.strictEqual(gotErr, err);
+ assert.strictEqual(unpipedSource, source);
+ assert.strictEqual(unpipedDest, dest);
+})();
diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js
new file mode 100644
index 0000000..0b6766d
--- /dev/null
+++ b/test/parallel/test-stream2-pipe-error-once-listener.js
@@ -0,0 +1,43 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var util = require('util');
+var stream = require('../../');
+
+
+var Read = function() {
+ stream.Readable.call(this);
+};
+util.inherits(Read, stream.Readable);
+
+Read.prototype._read = function(size) {
+ this.push('x');
+ this.push(null);
+};
+
+
+var Write = function() {
+ stream.Writable.call(this);
+};
+util.inherits(Write, stream.Writable);
+
+Write.prototype._write = function(buffer, encoding, cb) {
+ this.emit('error', new Error('boom'));
+ this.emit('alldone');
+};
+
+var read = new Read();
+var write = new Write();
+
+write.once('error', function(err) {});
+write.once('alldone', function(err) {
+ console.log('ok');
+});
+
+process.on('exit', function(c) {
+ console.error('error thrown even with listener');
+});
+
+read.pipe(write);
+
diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js
new file mode 100644
index 0000000..a4ba95d
--- /dev/null
+++ b/test/parallel/test-stream2-push.js
@@ -0,0 +1,118 @@
+'use strict';
+var common = require('../common');
+var stream = require('../../');
+var Readable = stream.Readable;
+var Writable = stream.Writable;
+var assert = require('assert');
+
+var util = require('util');
+var EE = require('events').EventEmitter;
+
+
+// a mock thing a bit like the net.Socket/tcp_wrap.handle interaction
+
+var stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+});
+
+var source = new EE();
+
+stream._read = function() {
+ console.error('stream._read');
+ readStart();
+};
+
+var ended = false;
+stream.on('end', function() {
+ ended = true;
+});
+
+source.on('data', function(chunk) {
+ var ret = stream.push(chunk);
+ console.error('data', stream._readableState.length);
+ if (!ret)
+ readStop();
+});
+
+source.on('end', function() {
+ stream.push(null);
+});
+
+var reading = false;
+
+function readStart() {
+ console.error('readStart');
+ reading = true;
+}
+
+function readStop() {
+ console.error('readStop');
+ reading = false;
+ process.nextTick(function() {
+ var r = stream.read();
+ if (r !== null)
+ writer.write(r);
+ });
+}
+
+var writer = new Writable({
+ decodeStrings: false
+});
+
+var written = [];
+
+var expectWritten =
+ [ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg' ];
+
+writer._write = function(chunk, encoding, cb) {
+ console.error('WRITE %s', chunk);
+ written.push(chunk);
+ process.nextTick(cb);
+};
+
+writer.on('finish', finish);
+
+
+// now emit some chunks.
+
+var chunk = 'asdfg';
+
+var set = 0;
+readStart();
+data();
+function data() {
+ assert(reading);
+ source.emit('data', chunk);
+ assert(reading);
+ source.emit('data', chunk);
+ assert(reading);
+ source.emit('data', chunk);
+ assert(reading);
+ source.emit('data', chunk);
+ assert(!reading);
+ if (set++ < 5)
+ setTimeout(data, 10);
+ else
+ end();
+}
+
+function finish() {
+ console.error('finish');
+ assert.deepEqual(written, expectWritten);
+ console.log('ok');
+}
+
+function end() {
+ source.emit('end');
+ assert(!reading);
+ writer.end(stream.read());
+ setTimeout(function() {
+ assert(ended);
+ });
+}
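
Stripped of the test scaffolding, the readStart()/readStop() pair
mirrors how a push-based handle gets bridged into a pull-based
Readable: throttle the source whenever push() returns false, and let it
flow again from _read(). The source and its control events below are
stand-ins, not real net.js internals:

    'use strict';
    var Readable = require('stream').Readable;
    var EE = require('events').EventEmitter;

    var source = new EE(); // stand-in for the TCP handle
    var r = new Readable({ highWaterMark: 16 });

    r._read = function() {
      source.emit('readStart'); // consumer wants data: let the source flow
    };

    source.on('data', function(chunk) {
      if (!r.push(chunk))        // false: buffer hit the high-water mark
        source.emit('readStop'); // throttle until _read() fires again
    });

    source.on('end', function() {
      r.push(null);
    });
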
diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js
new file mode 100644
index 0000000..cf1bcf2
--- /dev/null
+++ b/test/parallel/test-stream2-read-sync-stack.js
@@ -0,0 +1,34 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var Readable = require('../../').Readable;
+var r = new Readable();
+var N = 256 * 1024;
+
+// Go ahead and allow the pathological case for this test.
+// Yes, it's an infinite loop, that's the point.
+process.maxTickDepth = N + 2;
+
+var reads = 0;
+r._read = function(n) {
+ var chunk = reads++ === N ? null : new Buffer(1);
+ r.push(chunk);
+};
+
+r.on('readable', function onReadable() {
+ if (!(r._readableState.length % 256))
+ console.error('readable', r._readableState.length);
+ r.read(N * 2);
+});
+
+var ended = false;
+r.on('end', function onEnd() {
+ ended = true;
+});
+
+r.read(0);
+
+process.on('exit', function() {
+ assert(ended);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
new file mode 100644
index 0000000..30f9b32
--- /dev/null
+++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
@@ -0,0 +1,98 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../').Readable;
+
+test1();
+test2();
+
+function test1() {
+ var r = new Readable();
+
+ // should not end when we get a Buffer(0) or '' as the _read result;
+ // that just means that there is *temporarily* no data, and that we
+ // should go ahead and try again later.
+ //
+ // note that this is very unusual. it only works for crypto streams
+ // because the other side of the stream will call read(0) to cycle
+ // data through openssl. that's why we set the timeouts to call
+ // r.read(0) again later, otherwise there is no more work being done
+ // and the process just exits.
+
+ var buf = new Buffer(5);
+ buf.fill('x');
+ var reads = 5;
+ r._read = function(n) {
+ switch (reads--) {
+ case 0:
+ return r.push(null); // EOF
+ case 1:
+ return r.push(buf);
+ case 2:
+ setTimeout(r.read.bind(r, 0), 50);
+ return r.push(new Buffer(0)); // Not-EOF!
+ case 3:
+ setTimeout(r.read.bind(r, 0), 50);
+ return process.nextTick(function() {
+ return r.push(new Buffer(0));
+ });
+ case 4:
+ setTimeout(r.read.bind(r, 0), 50);
+ return setTimeout(function() {
+ return r.push(new Buffer(0));
+ });
+ case 5:
+ return setTimeout(function() {
+ return r.push(buf);
+ });
+ default:
+ throw new Error('unreachable');
+ }
+ };
+
+ var results = [];
+ function flow() {
+ var chunk;
+ while (null !== (chunk = r.read()))
+ results.push(chunk + '');
+ }
+ r.on('readable', flow);
+ r.on('end', function() {
+ results.push('EOF');
+ });
+ flow();
+
+ process.on('exit', function() {
+ assert.deepEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]);
+ console.log('ok');
+ });
+}
+
+function test2() {
+ var r = new Readable({ encoding: 'base64' });
+ var reads = 5;
+ r._read = function(n) {
+ if (!reads--)
+ return r.push(null); // EOF
+ else
+ return r.push(new Buffer('x'));
+ };
+
+ var results = [];
+ function flow() {
+ var chunk;
+ while (null !== (chunk = r.read()))
+ results.push(chunk + '');
+ }
+ r.on('readable', flow);
+ r.on('end', function() {
+ results.push('EOF');
+ });
+ flow();
+
+ process.on('exit', function() {
+ assert.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]);
+ console.log('ok');
+ });
+}
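
Isolated from the timing permutations, the rule described in the first
comment is: a zero-length chunk keeps the stream open without
satisfying the read, and some caller must issue read(0) later to prime
another _read. A minimal sketch:

    'use strict';
    var Readable = require('stream').Readable;

    var r = new Readable();
    var reads = 2;
    r._read = function() {
      if (reads === 2) {
        reads--;
        r.push(new Buffer(0));             // "no data yet" -- not EOF
        setTimeout(r.read.bind(r, 0), 10); // re-prime the pump later
      } else if (reads === 1) {
        reads--;
        r.push(new Buffer('data at last'));
      } else {
        r.push(null);                      // the real EOF
      }
    };

    r.on('readable', function() {
      var chunk;
      while (null !== (chunk = r.read()))
        console.log('got: %s', chunk);
    });
    r.on('end', function() {
      console.log('ended');
    });
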
diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js
new file mode 100644
index 0000000..145cbbc
--- /dev/null
+++ b/test/parallel/test-stream2-readable-from-list.js
@@ -0,0 +1,99 @@
+'use strict';
+var assert = require('assert');
+var common = require('../common');
+var fromList = require('../../lib/_stream_readable')._fromList;
+
+// tiny node-tap lookalike.
+var tests = [];
+var count = 0;
+
+function test(name, fn) {
+ count++;
+ tests.push([name, fn]);
+}
+
+function run() {
+ var next = tests.shift();
+ if (!next)
+ return console.error('ok');
+
+ var name = next[0];
+ var fn = next[1];
+ console.log('# %s', name);
+ fn({
+ same: assert.deepEqual,
+ equal: assert.equal,
+ end: function() {
+ count--;
+ run();
+ }
+ });
+}
+
+// ensure all tests have run
+process.on('exit', function() {
+ assert.equal(count, 0);
+});
+
+process.nextTick(run);
+
+
+test('buffers', function(t) {
+ // have a length
+ var len = 16;
+ var list = [ new Buffer('foog'),
+ new Buffer('bark'),
+ new Buffer('bazy'),
+ new Buffer('kuel') ];
+
+ // read more than the first element.
+ var ret = fromList(6, { buffer: list, length: 16 });
+ t.equal(ret.toString(), 'foogba');
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10 });
+ t.equal(ret.toString(), 'rk');
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8 });
+ t.equal(ret.toString(), 'ba');
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6 });
+ t.equal(ret.toString(), 'zykuel');
+
+ // all consumed.
+ t.same(list, []);
+
+ t.end();
+});
+
+test('strings', function(t) {
+ // have a length
+ var len = 16;
+ var list = [ 'foog',
+ 'bark',
+ 'bazy',
+ 'kuel' ];
+
+ // read more than the first element.
+ var ret = fromList(6, { buffer: list, length: 16, decoder: true });
+ t.equal(ret, 'foogba');
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10, decoder: true });
+ t.equal(ret, 'rk');
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8, decoder: true });
+ t.equal(ret, 'ba');
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6, decoder: true });
+ t.equal(ret, 'zykuel');
+
+ // all consumed.
+ t.same(list, []);
+
+ t.end();
+});
diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js
new file mode 100644
index 0000000..cbbde51
--- /dev/null
+++ b/test/parallel/test-stream2-readable-legacy-drain.js
@@ -0,0 +1,55 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Stream = require('../../');
+var Readable = require('../../').Readable;
+
+var r = new Readable();
+var N = 256;
+var reads = 0;
+r._read = function(n) {
+ return r.push(++reads === N ? null : new Buffer(1));
+};
+
+var rended = false;
+r.on('end', function() {
+ rended = true;
+});
+
+var w = new Stream();
+w.writable = true;
+var writes = 0;
+var buffered = 0;
+w.write = function(c) {
+ writes += c.length;
+ buffered += c.length;
+ process.nextTick(drain);
+ return false;
+};
+
+function drain() {
+ assert(buffered <= 3);
+ buffered = 0;
+ w.emit('drain');
+}
+
+
+var wended = false;
+w.end = function() {
+ wended = true;
+};
+
+// Just for kicks, let's mess with the drain count.
+// This verifies that even if it gets negative in the
+// pipe() cleanup function, we'll still function properly.
+r.on('readable', function() {
+ w.emit('drain');
+});
+
+r.pipe(w);
+process.on('exit', function() {
+ assert(rended);
+ assert(wended);
+ console.error('ok');
+});
diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js
new file mode 100644
index 0000000..5d3812b
--- /dev/null
+++ b/test/parallel/test-stream2-readable-non-empty-end.js
@@ -0,0 +1,58 @@
+'use strict';
+var assert = require('assert');
+var common = require('../common');
+var Readable = require('../../lib/_stream_readable');
+
+var len = 0;
+var chunks = new Array(10);
+for (var i = 1; i <= 10; i++) {
+ chunks[i - 1] = new Buffer(i);
+ len += i;
+}
+
+var test = new Readable();
+var n = 0;
+test._read = function(size) {
+ var chunk = chunks[n++];
+ setTimeout(function() {
+ test.push(chunk === undefined ? null : chunk);
+ });
+};
+
+test.on('end', thrower);
+function thrower() {
+ throw new Error('this should not happen!');
+}
+
+var bytesread = 0;
+test.on('readable', function() {
+ var b = len - bytesread - 1;
+ var res = test.read(b);
+ if (res) {
+ bytesread += res.length;
+ console.error('br=%d len=%d', bytesread, len);
+ setTimeout(next);
+ }
+ test.read(0);
+});
+test.read(0);
+
+function next() {
+ // now let's make 'end' happen
+ test.removeListener('end', thrower);
+
+ var endEmitted = false;
+ process.on('exit', function() {
+ assert(endEmitted, 'end should be emitted by now');
+ });
+ test.on('end', function() {
+ endEmitted = true;
+ });
+
+ // one to get the last byte
+ var r = test.read();
+ assert(r);
+ assert.equal(r.length, 1);
+ r = test.read();
+ assert.equal(r, null);
+}
diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js
new file mode 100644
index 0000000..0bcd48c
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap-empty.js
@@ -0,0 +1,23 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../lib/_stream_readable');
+var EE = require('events').EventEmitter;
+
+var oldStream = new EE();
+oldStream.pause = function() {};
+oldStream.resume = function() {};
+
+var newStream = new Readable().wrap(oldStream);
+
+var ended = false;
+newStream
+ .on('readable', function() {})
+ .on('end', function() { ended = true; });
+
+oldStream.emit('end');
+
+process.on('exit', function() {
+ assert.ok(ended);
+});
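
For reference, the same wrap() call with data actually flowing, to
complement the empty case above -- the old-style source is again a bare
EventEmitter standing in for a pre-streams2 stream:

    'use strict';
    var Readable = require('stream').Readable;
    var EE = require('events').EventEmitter;

    var oldStream = new EE();
    oldStream.pause = function() {};
    oldStream.resume = function() {};

    var r = new Readable().wrap(oldStream);
    r.on('data', function(chunk) {
      console.log('chunk: %s', chunk);
    });
    r.on('end', function() {
      console.log('done');
    });

    oldStream.emit('data', new Buffer('hi'));
    oldStream.emit('end');
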
diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js
new file mode 100644
index 0000000..e466e75
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap.js
@@ -0,0 +1,90 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var Readable = require('../../lib/_stream_readable');
+var Writable = require('../../lib/_stream_writable');
+var EE = require('events').EventEmitter;
+
+var testRuns = 0, completedRuns = 0;
+function runTest(highWaterMark, objectMode, produce) {
+ testRuns++;
+
+ var old = new EE();
+ var r = new Readable({ highWaterMark: highWaterMark,
+ objectMode: objectMode });
+ assert.equal(r, r.wrap(old));
+
+ var ended = false;
+ r.on('end', function() {
+ ended = true;
+ });
+
+ old.pause = function() {
+ console.error('old.pause()');
+ old.emit('pause');
+ flowing = false;
+ };
+
+ old.resume = function() {
+ console.error('old.resume()');
+ old.emit('resume');
+ flow();
+ };
+
+ var flowing;
+ var chunks = 10;
+ var oldEnded = false;
+ var expected = [];
+ function flow() {
+ flowing = true;
+ while (flowing && chunks-- > 0) {
+ var item = produce();
+ expected.push(item);
+ console.log('old.emit', chunks, flowing);
+ old.emit('data', item);
+ console.log('after emit', chunks, flowing);
+ }
+ if (chunks <= 0) {
+ oldEnded = true;
+ console.log('old end', chunks, flowing);
+ old.emit('end');
+ }
+ }
+
+ var w = new Writable({ highWaterMark: highWaterMark * 2,
+ objectMode: objectMode });
+ var written = [];
+ w._write = function(chunk, encoding, cb) {
+ console.log('_write', chunk);
+ written.push(chunk);
+ setTimeout(cb);
+ };
+
+ w.on('finish', function() {
+ completedRuns++;
+ performAsserts();
+ });
+
+ r.pipe(w);
+
+ flow();
+
+ function performAsserts() {
+ assert(ended);
+ assert(oldEnded);
+ assert.deepEqual(written, expected);
+ }
+}
+
+runTest(100, false, function() { return new Buffer(100); });
+runTest(10, false, function() { return new Buffer('xxxxxxxxxx'); });
+runTest(1, true, function() { return { foo: 'bar' }; });
+
+var objectChunks = [ 5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555 ];
+runTest(1, true, function() { return objectChunks.shift(); });
+
+process.on('exit', function() {
+ assert.equal(testRuns, completedRuns);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js
new file mode 100644
index 0000000..937a377
--- /dev/null
+++ b/test/parallel/test-stream2-set-encoding.js
@@ -0,0 +1,346 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var R = require('../../lib/_stream_readable');
+var util = require('util');
+
+// tiny node-tap lookalike.
+var tests = [];
+var count = 0;
+
+function test(name, fn) {
+ count++;
+ tests.push([name, fn]);
+}
+
+function run() {
+ var next = tests.shift();
+ if (!next)
+ return console.error('ok');
+
+ var name = next[0];
+ var fn = next[1];
+ console.log('# %s', name);
+ fn({
+ same: assert.deepEqual,
+ equal: assert.equal,
+ end: function() {
+ count--;
+ run();
+ }
+ });
+}
+
+// ensure all tests have run
+process.on('exit', function() {
+ assert.equal(count, 0);
+});
+
+process.nextTick(run);
+
+/////
+
+util.inherits(TestReader, R);
+
+function TestReader(n, opts) {
+ R.call(this, opts);
+
+ this.pos = 0;
+ this.len = n || 100;
+}
+
+TestReader.prototype._read = function(n) {
+ setTimeout(function() {
+
+ if (this.pos >= this.len) {
+ // double push(null) to test eos handling
+ this.push(null);
+ return this.push(null);
+ }
+
+ n = Math.min(n, this.len - this.pos);
+ if (n <= 0) {
+ // double push(null) to test eos handling
+ this.push(null);
+ return this.push(null);
+ }
+
+ this.pos += n;
+ var ret = new Buffer(n);
+ ret.fill('a');
+
+ console.log('this.push(ret)', ret);
+
+ return this.push(ret);
+ }.bind(this), 1);
+};
+
+test('setEncoding utf8', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('utf8');
+ var out = [];
+ var expect =
+ [ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+
+test('setEncoding hex', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('hex');
+ var out = [];
+ var expect =
+ [ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+test('setEncoding hex with read(13)', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('hex');
+ var out = [];
+ var expect =
+ [ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161' ];
+
+ tr.on('readable', function flow() {
+ console.log('readable once');
+ var chunk;
+ while (null !== (chunk = tr.read(13)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ console.log('END');
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+test('setEncoding base64', function(t) {
+ var tr = new TestReader(100);
+ tr.setEncoding('base64');
+ var out = [];
+ var expect =
+ [ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ==' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+test('encoding: utf8', function(t) {
+ var tr = new TestReader(100, { encoding: 'utf8' });
+ var out = [];
+ var expect =
+ [ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+
+test('encoding: hex', function(t) {
+ var tr = new TestReader(100, { encoding: 'hex' });
+ var out = [];
+ var expect =
+ [ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+test('encoding: hex with read(13)', function(t) {
+ var tr = new TestReader(100, { encoding: 'hex' });
+ var out = [];
+ var expect =
+ [ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(13)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+test('encoding: base64', function(t) {
+ var tr = new TestReader(100, { encoding: 'base64' });
+ var out = [];
+ var expect =
+ [ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ==' ];
+
+ tr.on('readable', function flow() {
+ var chunk;
+ while (null !== (chunk = tr.read(10)))
+ out.push(chunk);
+ });
+
+ tr.on('end', function() {
+ t.same(out, expect);
+ t.end();
+ });
+});
+
+test('chainable', function(t) {
+ var tr = new TestReader(100);
+ t.equal(tr.setEncoding('utf8'), tr);
+ t.end();
+});
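
The invariant behind all of these fixtures: once an encoding is set,
whether via setEncoding() or the constructor option, the internal
buffer holds decoded strings and read(n) counts characters of output
rather than bytes of input -- which is why read(10) above yields ten
hex digits, i.e. five source bytes. In isolation:

    'use strict';
    var Readable = require('stream').Readable;

    var r = new Readable();
    r._read = function() {};
    r.setEncoding('hex');

    r.push(new Buffer('ab')); // two bytes in: 0x61 0x62
    r.push(null);

    console.log(r.read(2)); // '61' -- one source byte, two hex chars
    console.log(r.read());  // '62'
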
diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js
new file mode 100644
index 0000000..20873b1
--- /dev/null
+++ b/test/parallel/test-stream2-transform.js
@@ -0,0 +1,508 @@
+'use strict';
+var assert = require('assert');
+var common = require('../common');
+var PassThrough = require('../../lib/_stream_passthrough');
+var Transform = require('../../lib/_stream_transform');
+
+// tiny node-tap lookalike.
+var tests = [];
+var count = 0;
+
+function test(name, fn) {
+ count++;
+ tests.push([name, fn]);
+}
+
+function run() {
+ var next = tests.shift();
+ if (!next)
+ return console.error('ok');
+
+ var name = next[0];
+ var fn = next[1];
+ console.log('# %s', name);
+ fn({
+ same: assert.deepEqual,
+ equal: assert.equal,
+ ok: assert,
+ end: function() {
+ count--;
+ run();
+ }
+ });
+}
+
+// ensure all tests have run
+process.on('exit', function() {
+ assert.equal(count, 0);
+});
+
+process.nextTick(run);
+
+/////
+
+test('writable side consumption', function(t) {
+ var tx = new Transform({
+ highWaterMark: 10
+ });
+
+ var transformed = 0;
+ tx._transform = function(chunk, encoding, cb) {
+ transformed += chunk.length;
+ tx.push(chunk);
+ cb();
+ };
+
+ for (var i = 1; i <= 10; i++) {
+ tx.write(new Buffer(i));
+ }
+ tx.end();
+
+ t.equal(tx._readableState.length, 10);
+ t.equal(transformed, 10);
+ t.equal(tx._transformState.writechunk.length, 5);
+ t.same(tx._writableState.getBuffer().map(function(c) {
+ return c.chunk.length;
+ }), [6, 7, 8, 9, 10]);
+
+ t.end();
+});
+
+test('passthrough', function(t) {
+ var pt = new PassThrough();
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5).toString(), 'arkba');
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5).toString(), 'l');
+ t.end();
+});
+
+test('object passthrough', function(t) {
+ var pt = new PassThrough({ objectMode: true });
+
+ pt.write(1);
+ pt.write(true);
+ pt.write(false);
+ pt.write(0);
+ pt.write('foo');
+ pt.write('');
+ pt.write({ a: 'b'});
+ pt.end();
+
+ t.equal(pt.read(), 1);
+ t.equal(pt.read(), true);
+ t.equal(pt.read(), false);
+ t.equal(pt.read(), 0);
+ t.equal(pt.read(), 'foo');
+ t.equal(pt.read(), '');
+ t.same(pt.read(), { a: 'b'});
+ t.end();
+});
+
+test('simple transform', function(t) {
+ var pt = new Transform();
+ pt._transform = function(c, e, cb) {
+ var ret = new Buffer(c.length);
+ ret.fill('x');
+ pt.push(ret);
+ cb();
+ };
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ t.equal(pt.read(5).toString(), 'xxxxx');
+ t.equal(pt.read(5).toString(), 'xxxxx');
+ t.equal(pt.read(5).toString(), 'xxxxx');
+ t.equal(pt.read(5).toString(), 'x');
+ t.end();
+});
+
+test('simple object transform', function(t) {
+ var pt = new Transform({ objectMode: true });
+ pt._transform = function(c, e, cb) {
+ pt.push(JSON.stringify(c));
+ cb();
+ };
+
+ pt.write(1);
+ pt.write(true);
+ pt.write(false);
+ pt.write(0);
+ pt.write('foo');
+ pt.write('');
+ pt.write({ a: 'b'});
+ pt.end();
+
+ t.equal(pt.read(), '1');
+ t.equal(pt.read(), 'true');
+ t.equal(pt.read(), 'false');
+ t.equal(pt.read(), '0');
+ t.equal(pt.read(), '"foo"');
+ t.equal(pt.read(), '""');
+ t.equal(pt.read(), '{"a":"b"}');
+ t.end();
+});
+
+test('async passthrough', function(t) {
+ var pt = new Transform();
+ pt._transform = function(chunk, encoding, cb) {
+ setTimeout(function() {
+ pt.push(chunk);
+ cb();
+ }, 10);
+ };
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ pt.on('finish', function() {
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5).toString(), 'arkba');
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5).toString(), 'l');
+ t.end();
+ });
+});
+
+test('asymmetric transform (expand)', function(t) {
+ var pt = new Transform();
+
+ // emit each chunk 2 times.
+ pt._transform = function(chunk, encoding, cb) {
+ setTimeout(function() {
+ pt.push(chunk);
+ setTimeout(function() {
+ pt.push(chunk);
+ cb();
+ }, 10);
+ }, 10);
+ };
+
+ pt.write(new Buffer('foog'));
+ pt.write(new Buffer('bark'));
+ pt.write(new Buffer('bazy'));
+ pt.write(new Buffer('kuel'));
+ pt.end();
+
+ pt.on('finish', function() {
+ t.equal(pt.read(5).toString(), 'foogf');
+ t.equal(pt.read(5).toString(), 'oogba');
+ t.equal(pt.read(5).toString(), 'rkbar');
+ t.equal(pt.read(5).toString(), 'kbazy');
+ t.equal(pt.read(5).toString(), 'bazyk');
+ t.equal(pt.read(5).toString(), 'uelku');
+ t.equal(pt.read(5).toString(), 'el');
+ t.end();
+ });
+});
+
+test('asymmetric transform (compress)', function(t) {
+ var pt = new Transform();
+
+ // each output is the first char of 3 consecutive chunks,
+ // or whatever's left.
+ pt.state = '';
+
+ pt._transform = function(chunk, encoding, cb) {
+ if (!chunk)
+ chunk = '';
+ var s = chunk.toString();
+ setTimeout(function() {
+ this.state += s.charAt(0);
+ if (this.state.length === 3) {
+ pt.push(new Buffer(this.state));
+ this.state = '';
+ }
+ cb();
+ }.bind(this), 10);
+ };
+
+ pt._flush = function(cb) {
+ // just output whatever we have.
+ pt.push(new Buffer(this.state));
+ this.state = '';
+ cb();
+ };
+
+ pt.write(new Buffer('aaaa'));
+ pt.write(new Buffer('bbbb'));
+ pt.write(new Buffer('cccc'));
+ pt.write(new Buffer('dddd'));
+ pt.write(new Buffer('eeee'));
+ pt.write(new Buffer('aaaa'));
+ pt.write(new Buffer('bbbb'));
+ pt.write(new Buffer('cccc'));
+ pt.write(new Buffer('dddd'));
+ pt.write(new Buffer('eeee'));
+ pt.write(new Buffer('aaaa'));
+ pt.write(new Buffer('bbbb'));
+ pt.write(new Buffer('cccc'));
+ pt.write(new Buffer('dddd'));
+ pt.end();
+
+ // 'abcdeabcdeabcd'
+ pt.on('finish', function() {
+ t.equal(pt.read(5).toString(), 'abcde');
+ t.equal(pt.read(5).toString(), 'abcde');
+ t.equal(pt.read(5).toString(), 'abcd');
+ t.end();
+ });
+});
+
+// this tests for a stall when data is written to a full stream
+// that has empty transforms.
+test('complex transform', function(t) {
+ var count = 0;
+ var saved = null;
+ var pt = new Transform({highWaterMark:3});
+ pt._transform = function(c, e, cb) {
+ if (count++ === 1)
+ saved = c;
+ else {
+ if (saved) {
+ pt.push(saved);
+ saved = null;
+ }
+ pt.push(c);
+ }
+
+ cb();
+ };
+
+ pt.once('readable', function() {
+ process.nextTick(function() {
+ pt.write(new Buffer('d'));
+ pt.write(new Buffer('ef'), function() {
+ pt.end();
+ t.end();
+ });
+ t.equal(pt.read().toString(), 'abcdef');
+ t.equal(pt.read(), null);
+ });
+ });
+
+ pt.write(new Buffer('abc'));
+});
+
+
+test('passthrough event emission', function(t) {
+ var pt = new PassThrough();
+ var emits = 0;
+ pt.on('readable', function() {
+ var state = pt._readableState;
+ console.error('>>> emit readable %d', emits);
+ emits++;
+ });
+
+ var i = 0;
+
+ pt.write(new Buffer('foog'));
+
+ console.error('need emit 0');
+ pt.write(new Buffer('bark'));
+
+ console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1);
+
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5) + '', 'null');
+
+ console.error('need emit 1');
+
+ pt.write(new Buffer('bazy'));
+ console.error('should have emitted, but not again');
+ pt.write(new Buffer('kuel'));
+
+ console.error('should have emitted readable now 2 === %d', emits);
+ t.equal(emits, 2);
+
+ t.equal(pt.read(5).toString(), 'arkba');
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5), null);
+
+ console.error('need emit 2');
+
+ pt.end();
+
+ t.equal(emits, 3);
+
+ t.equal(pt.read(5).toString(), 'l');
+ t.equal(pt.read(5), null);
+
+ console.error('should not have emitted again');
+ t.equal(emits, 3);
+ t.end();
+});
+
+test('passthrough event emission reordered', function(t) {
+ var pt = new PassThrough();
+ var emits = 0;
+ pt.on('readable', function() {
+ console.error('emit readable', emits);
+ emits++;
+ });
+
+ pt.write(new Buffer('foog'));
+ console.error('need emit 0');
+ pt.write(new Buffer('bark'));
+ console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1);
+
+ t.equal(pt.read(5).toString(), 'foogb');
+ t.equal(pt.read(5), null);
+
+ console.error('need emit 1');
+ pt.once('readable', function() {
+ t.equal(pt.read(5).toString(), 'arkba');
+
+ t.equal(pt.read(5), null);
+
+ console.error('need emit 2');
+ pt.once('readable', function() {
+ t.equal(pt.read(5).toString(), 'zykue');
+ t.equal(pt.read(5), null);
+ pt.once('readable', function() {
+ t.equal(pt.read(5).toString(), 'l');
+ t.equal(pt.read(5), null);
+ t.equal(emits, 4);
+ t.end();
+ });
+ pt.end();
+ });
+ pt.write(new Buffer('kuel'));
+ });
+
+ pt.write(new Buffer('bazy'));
+});
+
+test('passthrough facade', function(t) {
+ console.error('passthrough facade');
+ var pt = new PassThrough();
+ var datas = [];
+ pt.on('data', function(chunk) {
+ datas.push(chunk.toString());
+ });
+
+ pt.on('end', function() {
+ t.same(datas, ['foog', 'bark', 'bazy', 'kuel']);
+ t.end();
+ });
+
+ pt.write(new Buffer('foog'));
+ setTimeout(function() {
+ pt.write(new Buffer('bark'));
+ setTimeout(function() {
+ pt.write(new Buffer('bazy'));
+ setTimeout(function() {
+ pt.write(new Buffer('kuel'));
+ setTimeout(function() {
+ pt.end();
+ }, 10);
+ }, 10);
+ }, 10);
+ }, 10);
+});
+
+test('object transform (json parse)', function(t) {
+ console.error('json parse stream');
+ var jp = new Transform({ objectMode: true });
+ jp._transform = function(data, encoding, cb) {
+ try {
+ jp.push(JSON.parse(data));
+ cb();
+ } catch (er) {
+ cb(er);
+ }
+ };
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ var objects = [
+ { foo: 'bar' },
+ 100,
+ 'string',
+ { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
+ ];
+
+ var ended = false;
+ jp.on('end', function() {
+ ended = true;
+ });
+
+ forEach(objects, function(obj) {
+ jp.write(JSON.stringify(obj));
+ var res = jp.read();
+ t.same(res, obj);
+ });
+
+ jp.end();
+ // read one more time to get the 'end' event
+ jp.read();
+
+ process.nextTick(function() {
+ t.ok(ended);
+ t.end();
+ });
+});
+
+test('object transform (json stringify)', function(t) {
+ console.error('json stringify stream');
+ var js = new Transform({ objectMode: true });
+ js._transform = function(data, encoding, cb) {
+ try {
+ js.push(JSON.stringify(data));
+ cb();
+ } catch (er) {
+ cb(er);
+ }
+ };
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ var objects = [
+ { foo: 'bar' },
+ 100,
+ 'string',
+ { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
+ ];
+
+ var ended = false;
+ js.on('end', function() {
+ ended = true;
+ });
+
+ forEach(objects, function(obj) {
+ js.write(obj);
+ var res = js.read();
+ t.equal(res, JSON.stringify(obj));
+ });
+
+ js.end();
+ // read one more time to get the 'end' event
+ js.read();
+
+ process.nextTick(function() {
+ t.ok(ended);
+ t.end();
+ });
+});
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
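
The contract all of these cases lean on, stated once: _transform(chunk,
encoding, cb) may push() any number of outputs per input and must call
cb() exactly once, and _flush(cb) runs after end() so buffered state
can still be emitted. A minimal uppercasing transform as a sketch:

    'use strict';
    var Transform = require('stream').Transform;

    var upper = new Transform();
    upper._transform = function(chunk, encoding, cb) {
      this.push(chunk.toString().toUpperCase()); // one output per input
      cb();
    };

    upper.on('data', function(chunk) {
      console.log('%s', chunk); // prints: SHOUT
    });
    upper.end('shout');
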
diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js
new file mode 100644
index 0000000..1a7b85a
--- /dev/null
+++ b/test/parallel/test-stream2-unpipe-drain.js
@@ -0,0 +1,60 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var stream = require('../../');
+
+if (!common.hasCrypto) {
+ console.log('1..0 # Skipped: missing crypto');
+ return;
+}
+var crypto = require('crypto');
+
+var util = require('util');
+
+function TestWriter() {
+ stream.Writable.call(this);
+}
+util.inherits(TestWriter, stream.Writable);
+
+TestWriter.prototype._write = function(buffer, encoding, callback) {
+ console.log('write called');
+ // super slow write stream (callback never called)
+};
+
+var dest = new TestWriter();
+
+function TestReader(id) {
+ stream.Readable.call(this);
+ this.reads = 0;
+}
+util.inherits(TestReader, stream.Readable);
+
+TestReader.prototype._read = function(size) {
+ this.reads += 1;
+ this.push(crypto.randomBytes(size));
+};
+
+var src1 = new TestReader();
+var src2 = new TestReader();
+
+src1.pipe(dest);
+
+src1.once('readable', function() {
+ process.nextTick(function() {
+
+ src2.pipe(dest);
+
+ src2.once('readable', function() {
+ process.nextTick(function() {
+
+ src1.unpipe(dest);
+ });
+ });
+ });
+});
+
+
+process.on('exit', function() {
+ assert.equal(src1.reads, 2);
+ assert.equal(src2.reads, 2);
+});
diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js
new file mode 100644
index 0000000..4677b57
--- /dev/null
+++ b/test/parallel/test-stream2-unpipe-leak.js
@@ -0,0 +1,54 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+var stream = require('../../');
+
+var chunk = new Buffer('hallo');
+
+var util = require('util');
+
+function TestWriter() {
+ stream.Writable.call(this);
+}
+util.inherits(TestWriter, stream.Writable);
+
+TestWriter.prototype._write = function(buffer, encoding, callback) {
+ callback(null);
+};
+
+var dest = new TestWriter();
+
+// Set this high so that we'd trigger a nextTick warning
+// and/or RangeError if we do maybeReadMore wrong.
+function TestReader() {
+ stream.Readable.call(this, { highWaterMark: 0x10000 });
+}
+util.inherits(TestReader, stream.Readable);
+
+TestReader.prototype._read = function(size) {
+ this.push(chunk);
+};
+
+var src = new TestReader();
+
+for (var i = 0; i < 10; i++) {
+ src.pipe(dest);
+ src.unpipe(dest);
+}
+
+assert.equal(src.listeners('end').length, 0);
+assert.equal(src.listeners('readable').length, 0);
+
+assert.equal(dest.listeners('unpipe').length, 0);
+assert.equal(dest.listeners('drain').length, 0);
+assert.equal(dest.listeners('error').length, 0);
+assert.equal(dest.listeners('close').length, 0);
+assert.equal(dest.listeners('finish').length, 0);
+
+console.error(src._readableState);
+process.on('exit', function() {
+ src._readableState.buffer.length = 0;
+ console.error(src._readableState);
+ assert(src._readableState.length >= src._readableState.highWaterMark);
+ console.log('ok');
+});
diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js
new file mode 100644
index 0000000..2b92fbf
--- /dev/null
+++ b/test/parallel/test-stream2-writable.js
@@ -0,0 +1,391 @@
+'use strict';
+var common = require('../common');
+var W = require('../../lib/_stream_writable');
+var D = require('../../lib/_stream_duplex');
+var assert = require('assert');
+
+var util = require('util');
+util.inherits(TestWriter, W);
+
+function TestWriter() {
+ W.apply(this, arguments);
+ this.buffer = [];
+ this.written = 0;
+}
+
+TestWriter.prototype._write = function(chunk, encoding, cb) {
+ // simulate a small unpredictable latency
+ setTimeout(function() {
+ this.buffer.push(chunk.toString());
+ this.written += chunk.length;
+ cb();
+ }.bind(this), Math.floor(Math.random() * 10));
+};
+
+var chunks = new Array(50);
+for (var i = 0; i < chunks.length; i++) {
+ chunks[i] = new Array(i + 1).join('x');
+}
+
+// tiny node-tap lookalike.
+var tests = [];
+var count = 0;
+
+function test(name, fn) {
+ count++;
+ tests.push([name, fn]);
+}
+
+function run() {
+ var next = tests.shift();
+ if (!next)
+ return console.error('ok');
+
+ var name = next[0];
+ var fn = next[1];
+ console.log('# %s', name);
+ fn({
+ same: assert.deepEqual,
+ equal: assert.equal,
+ end: function() {
+ count--;
+ run();
+ }
+ });
+}
+
+// ensure all tests have run
+process.on('exit', function() {
+ assert.equal(count, 0);
+});
+
+process.nextTick(run);
+
+test('write fast', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.end();
+ });
+
+ forEach(chunks, function(chunk) {
+ // screw backpressure. Just buffer it all up.
+ tw.write(chunk);
+ });
+ tw.end();
+});
+
+test('write slow', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.end();
+ });
+
+ var i = 0;
+ (function W() {
+ tw.write(chunks[i++]);
+ if (i < chunks.length)
+ setTimeout(W, 10);
+ else
+ tw.end();
+ })();
+});
+
+test('write backpressure', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 50
+ });
+
+ var drains = 0;
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.equal(drains, 17);
+ t.end();
+ });
+
+ tw.on('drain', function() {
+ drains++;
+ });
+
+ var i = 0;
+ (function W() {
+ do {
+ var ret = tw.write(chunks[i++]);
+ } while (ret !== false && i < chunks.length);
+
+ if (i < chunks.length) {
+ assert(tw._writableState.length >= 50);
+ tw.once('drain', W);
+ } else {
+ tw.end();
+ }
+ })();
+});
+
+test('write bufferize', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ var encodings =
+ [ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined ];
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got the expected chunks');
+ });
+
+ forEach(chunks, function(chunk, i) {
+ var enc = encodings[ i % encodings.length ];
+ chunk = new Buffer(chunk);
+ tw.write(chunk.toString(enc), enc);
+ });
+ t.end();
+});
+
+test('write no bufferize', function(t) {
+ var tw = new TestWriter({
+ highWaterMark: 100,
+ decodeStrings: false
+ });
+
+ tw._write = function(chunk, encoding, cb) {
+ assert(typeof chunk === 'string');
+ chunk = new Buffer(chunk, encoding);
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb);
+ };
+
+ var encodings =
+ [ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined ];
+
+ tw.on('finish', function() {
+ t.same(tw.buffer, chunks, 'got the expected chunks');
+ });
+
+ forEach(chunks, function(chunk, i) {
+ var enc = encodings[ i % encodings.length ];
+ chunk = new Buffer(chunk);
+ tw.write(chunk.toString(enc), enc);
+ });
+ t.end();
+});
+
+test('write callbacks', function(t) {
+ var callbacks = chunks.map(function(chunk, i) {
+ return [i, function(er) {
+ callbacks._called[i] = chunk;
+ }];
+ }).reduce(function(set, x) {
+ set['callback-' + x[0]] = x[1];
+ return set;
+ }, {});
+ callbacks._called = [];
+
+ var tw = new TestWriter({
+ highWaterMark: 100
+ });
+
+ tw.on('finish', function() {
+ process.nextTick(function() {
+ t.same(tw.buffer, chunks, 'got chunks in the right order');
+ t.same(callbacks._called, chunks, 'called all callbacks');
+ t.end();
+ });
+ });
+
+ forEach(chunks, function(chunk, i) {
+ tw.write(chunk, callbacks['callback-' + i]);
+ });
+ tw.end();
+});
+
+test('end callback', function(t) {
+ var tw = new TestWriter();
+ tw.end(function() {
+ t.end();
+ });
+});
+
+test('end callback with chunk', function(t) {
+ var tw = new TestWriter();
+ tw.end(new Buffer('hello world'), function() {
+ t.end();
+ });
+});
+
+test('end callback with chunk and encoding', function(t) {
+ var tw = new TestWriter();
+ tw.end('hello world', 'ascii', function() {
+ t.end();
+ });
+});
+
+test('end callback after .write() call', function(t) {
+ var tw = new TestWriter();
+ tw.write(new Buffer('hello world'));
+ tw.end(function() {
+ t.end();
+ });
+});
+
+test('end callback called after write callback', function(t) {
+ var tw = new TestWriter();
+ var writeCalledback = false;
+ tw.write(new Buffer('hello world'), function() {
+ writeCalledback = true;
+ });
+ tw.end(function() {
+ t.equal(writeCalledback, true);
+ t.end();
+ });
+});
+
+test('encoding should be ignored for buffers', function(t) {
+ var tw = new W();
+ var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb';
+ tw._write = function(chunk, encoding, cb) {
+ t.equal(chunk.toString('hex'), hex);
+ t.end();
+ };
+ var buf = new Buffer(hex, 'hex');
+ tw.write(buf, 'binary');
+});
+
+test('writables are not pipable', function(t) {
+ var w = new W();
+ w._write = function() {};
+ var gotError = false;
+ w.on('error', function(er) {
+ gotError = true;
+ });
+ w.pipe(process.stdout);
+ assert(gotError);
+ t.end();
+});
+
+test('duplexes are pipable', function(t) {
+ var d = new D();
+ d._read = function() {};
+ d._write = function() {};
+ var gotError = false;
+ d.on('error', function(er) {
+ gotError = true;
+ });
+ d.pipe(process.stdout);
+ assert(!gotError);
+ t.end();
+});
+
+test('end(chunk) two times is an error', function(t) {
+ var w = new W();
+ w._write = function() {};
+ var gotError = false;
+ w.on('error', function(er) {
+ gotError = true;
+ t.equal(er.message, 'write after end');
+ });
+ w.end('this is the end');
+ w.end('and so is this');
+ process.nextTick(function() {
+ assert(gotError);
+ t.end();
+ });
+});
+
+test('dont end while writing', function(t) {
+ var w = new W();
+ var wrote = false;
+ w._write = function(chunk, e, cb) {
+ assert(!this.writing);
+ wrote = true;
+ this.writing = true;
+ setTimeout(function() {
+ this.writing = false;
+ cb();
+ }.bind(this));
+ };
+ w.on('finish', function() {
+ assert(wrote);
+ t.end();
+ });
+ w.write(Buffer(0));
+ w.end();
+});
+
+test('finish does not come before write cb', function(t) {
+ var w = new W();
+ var writeCb = false;
+ w._write = function(chunk, e, cb) {
+ setTimeout(function() {
+ writeCb = true;
+ cb();
+ }, 10);
+ };
+ w.on('finish', function() {
+ assert(writeCb);
+ t.end();
+ });
+ w.write(Buffer(0));
+ w.end();
+});
+
+test('finish does not come before sync _write cb', function(t) {
+ var w = new W();
+ var writeCb = false;
+ w._write = function(chunk, e, cb) {
+ cb();
+ };
+ w.on('finish', function() {
+ assert(writeCb);
+ t.end();
+ });
+ w.write(Buffer(0), function(er) {
+ writeCb = true;
+ });
+ w.end();
+});
+
+test('finish is emitted if last chunk is empty', function(t) {
+ var w = new W();
+ w._write = function(chunk, e, cb) {
+ process.nextTick(cb);
+ };
+ w.on('finish', function() {
+ t.end();
+ });
+ w.write(Buffer(1));
+ w.end(Buffer(0));
+});
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js
new file mode 100644
index 0000000..f6598ca
--- /dev/null
+++ b/test/parallel/test-stream3-pause-then-read.js
@@ -0,0 +1,147 @@
+'use strict';
+var common = require('../common');
+var assert = require('assert');
+
+var stream = require('../../');
+var Readable = stream.Readable;
+var Writable = stream.Writable;
+
+var totalChunks = 100;
+var chunkSize = 99;
+var expectTotalData = totalChunks * chunkSize;
+var expectEndingData = expectTotalData;
+
+var r = new Readable({ highWaterMark: 1000 });
+var chunks = totalChunks;
+r._read = function(n) {
+ if (!(chunks % 2))
+ setImmediate(push);
+ else if (!(chunks % 3))
+ process.nextTick(push);
+ else
+ push();
+};
+
+var totalPushed = 0;
+function push() {
+ var chunk = chunks-- > 0 ? new Buffer(chunkSize) : null;
+ if (chunk) {
+ totalPushed += chunk.length;
+ chunk.fill('x');
+ }
+ r.push(chunk);
+}
+
+read100();
+
+// first we read 100 bytes
+function read100() {
+ readn(100, onData);
+}
+
+function readn(n, then) {
+ console.error('read %d', n);
+ expectEndingData -= n;
+ ;(function read() {
+ var c = r.read(n);
+ if (!c)
+ r.once('readable', read);
+ else {
+ assert.equal(c.length, n);
+ assert(!r._readableState.flowing);
+ then();
+ }
+ })();
+}
+
+// then we listen to some data events
+function onData() {
+ expectEndingData -= 100;
+ console.error('onData');
+ var seen = 0;
+ r.on('data', function od(c) {
+ seen += c.length;
+ if (seen >= 100) {
+ // seen enough
+ r.removeListener('data', od);
+ r.pause();
+ if (seen > 100) {
+ // oh no, seen too much!
+ // put the extra back.
+ var diff = seen - 100;
+ r.unshift(c.slice(c.length - diff));
+ console.error('seen too much', seen, diff);
+ }
+
+ // Nothing should be lost in between
+ setImmediate(pipeLittle);
+ }
+ });
+}
+
+// Just pipe 200 bytes, then unshift the extra and unpipe
+function pipeLittle() {
+ expectEndingData -= 200;
+ console.error('pipe a little');
+ var w = new Writable();
+ var written = 0;
+ w.on('finish', function() {
+ assert.equal(written, 200);
+ setImmediate(read1234);
+ });
+ w._write = function(chunk, encoding, cb) {
+ written += chunk.length;
+ if (written >= 200) {
+ r.unpipe(w);
+ w.end();
+ cb();
+ if (written > 200) {
+ var diff = written - 200;
+ written -= diff;
+ r.unshift(chunk.slice(chunk.length - diff));
+ }
+ } else {
+ setImmediate(cb);
+ }
+ };
+ r.pipe(w);
+}
+
+// now read 1234 more bytes
+function read1234() {
+ readn(1234, resumePause);
+}
+
+function resumePause() {
+ console.error('resumePause');
+ // don't read anything, just resume and re-pause a whole bunch
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ r.resume();
+ r.pause();
+ setImmediate(pipe);
+}
+
+
+function pipe() {
+ console.error('pipe the rest');
+ var w = new Writable();
+ var written = 0;
+ w._write = function(chunk, encoding, cb) {
+ written += chunk.length;
+ cb();
+ };
+ w.on('finish', function() {
+ console.error('written', written, totalPushed);
+ assert.equal(written, expectEndingData);
+ assert.equal(totalPushed, expectTotalData);
+ console.log('ok');
+ });
+ r.pipe(w);
+}
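
The unshift() idiom this test uses twice (in the 'data' handler and in
_write) is worth isolating: a consumer that has read more than it
wanted can hand the excess back to the front of the internal buffer
before pausing or unpiping:

    'use strict';
    var Readable = require('stream').Readable;

    var r = new Readable();
    r._read = function() {};
    r.push(new Buffer('abcdef'));

    var c = r.read();                 // 'abcdef': six bytes, we want four
    r.unshift(c.slice(4));            // give 'ef' back to the stream
    console.log(r.read().toString()); // 'ef'
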
diff --git a/transform.js b/transform.js
new file mode 100644
index 0000000..5d482f0
--- /dev/null
+++ b/transform.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_transform.js")
diff --git a/writable.js b/writable.js
new file mode 100644
index 0000000..e1e9efd
--- /dev/null
+++ b/writable.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_writable.js")
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-readable-stream.git