[Pkg-javascript-commits] [node-browserify-zlib] 24/50: switch tests to original node, update binding.js
Bastien Roucariès
rouca at moszumanska.debian.org
Fri Dec 15 13:09:40 UTC 2017
This is an automated email from the git hooks/post-receive script.
rouca pushed a commit to branch master
in repository node-browserify-zlib.
commit 9730a3de982f69fdb169d7f0b663b1d79cc91244
Author: Jan Schär <jscissr at gmail.com>
Date: Wed Aug 10 20:52:25 2016 +0200
switch tests to original node, update binding.js
The tests are now the same as Node.js core, except for two changed imports.
src/binding.js was also updated to match current Node core (v6.3.1).
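For illustration, the two changed imports in each ported test amount to pointing the Node core test at the local sources. A minimal sketch, with the assumed original Node-core requires shown as comments (the core paths are an assumption based on how core tests are laid out; the local requires match the diff below):

    // Node core version of a zlib test (assumed):
    //   const common = require('../common');
    //   const zlib = require('zlib');
    // browserify-zlib version, as in the tests below:
    const common = require('./common');  // local test helpers (test/common.js)
    const zlib = require('../');         // the browserify-zlib implementation under test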
---
karma.conf.js | 11 +-
package.json | 27 +-
src/binding.js | 94 ++++--
test/build.js | 67 +++++
test/common.js | 65 ++++
test/disabled-test-zlib-flush.js | 36 +++
...ed-test-zlib-from-gzip-with-trailing-garbage.js | 49 ++++
test/disabled-test-zlib-params.js | 34 +++
test/fixtures/empty.txt | 1 -
test/fixtures/pseudo-multimember-gzip.gz | Bin 0 -> 161 bytes
test/fixtures/pseudo-multimember-gzip.z | Bin 0 -> 148 bytes
test/test-zlib-close-after-error.js | 16 +
test/test-zlib-close-after-write.js | 24 +-
test/test-zlib-const.js | 24 +-
test/test-zlib-convenience-methods.js | 116 ++++----
test/test-zlib-dictionary-fail.js | 50 ++--
test/test-zlib-dictionary.js | 140 +++++----
test/test-zlib-flush-drain.js | 89 +++---
test/test-zlib-flush-flags.js | 28 ++
test/test-zlib-flush.js | 42 ---
test/test-zlib-from-concatenated-gzip.js | 100 +++++--
test/test-zlib-from-gzip-with-trailing-garbage.js | 59 ----
test/test-zlib-from-gzip.js | 55 ++--
test/test-zlib-from-string.js | 127 ++++----
test/test-zlib-invalid-input.js | 92 +++---
test/test-zlib-params.js | 40 ---
test/test-zlib-random-byte-pipes.js | 220 +++++++-------
test/test-zlib-sync-no-event.js | 21 ++
test/test-zlib-truncated.js | 120 ++++----
test/test-zlib-unzip-one-byte-chunks.js | 28 ++
test/test-zlib-write-after-close.js | 30 +-
test/test-zlib-write-after-flush.js | 72 +++--
test/test-zlib-zero-byte.js | 43 +--
test/test-zlib.js | 326 +++++++++++----------
34 files changed, 1229 insertions(+), 1017 deletions(-)
diff --git a/karma.conf.js b/karma.conf.js
index 50a418b..e00b23a 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -1,14 +1,7 @@
module.exports = function (karma) {
karma.set({
- frameworks: ['browserify', 'mocha'],
- files: ['test/**/test-*.js'],
- preprocessors: {
- 'test/**/test-*.js': ['browserify']
- },
- browserify: {
- debug: true,
- transform: ['brfs']
- },
+ frameworks: ['mocha'],
+ files: ['test/tmp/browserified.js'],
reporters: ['mocha-own'],
mochaOwnReporter: {
reporter: 'spec'
diff --git a/package.json b/package.json
index 580882d..431b656 100644
--- a/package.json
+++ b/package.json
@@ -14,32 +14,41 @@
"pako": "~1.0.1"
},
"devDependencies": {
+ "assert": "^1.4.1",
"babel-cli": "^6.11.4",
- "babel-preset-es2015": "^6.13.2",
+ "babel-plugin-transform-es2015-arrow-functions": "^6.8.0",
+ "babel-plugin-transform-es2015-block-scoping": "^6.10.1",
+ "babel-plugin-transform-es2015-template-literals": "^6.8.0",
+ "babelify": "^7.3.0",
"brfs": "^1.0.1",
+ "browserify": "^13.1.0",
+ "exec-glob": "^1.2.1",
+ "glob": "^7.0.5",
"karma": "^1.1.0",
- "karma-browserify": "^5.0.3",
"karma-chrome-launcher": "^1.0.1",
- "karma-firefox-launcher": "^0.1.7",
+ "karma-firefox-launcher": "^1.0.0",
"karma-mocha": "^1.0.1",
"karma-mocha-own-reporter": "^1.1.2",
"karma-phantomjs-launcher": "^1.0.0",
"mocha": "^3.0.1",
"phantomjs-prebuilt": "^2.1.7",
- "standard": "^6.0.8",
+ "standard": "^7.1.2",
"watchify": "^3.7.0"
},
"scripts": {
"build": "babel src --out-dir lib",
- "lint": "standard \"src/binding.js\" \"test/**/*.js\"",
+ "lint": "standard \"*.js\" \"!(node_modules|lib)/!(*test-zlib*|index).js\"",
"pretest": "npm run build",
"test": "npm run test:node && npm run test:browser",
- "test:browser": "karma start --single-run=true karma.conf.js",
- "test:node": "mocha test/test-*"
+ "test:node": "exec-glob node test/test-*",
+ "pretest:browser": "node test/build",
+ "test:browser": "karma start --single-run=true karma.conf.js"
},
"babel": {
- "presets": [
- "es2015"
+ "plugins": [
+ "transform-es2015-arrow-functions",
+ "transform-es2015-block-scoping",
+ "transform-es2015-template-literals"
]
},
"author": "Devon Govett <devongovett at gmail.com>",
diff --git a/src/binding.js b/src/binding.js
index d2ac7fd..bb44dfc 100644
--- a/src/binding.js
+++ b/src/binding.js
@@ -1,4 +1,5 @@
'use strict'
+/* eslint camelcase: "off" */
var assert = require('assert')
@@ -23,17 +24,15 @@ exports.UNZIP = 7
var GZIP_HEADER_ID1 = 0x1f
var GZIP_HEADER_ID2 = 0x8b
-var GZIP_MIN_HEADER_SIZE = 10
/**
* Emulate Node's zlib C++ layer for use by the JS layer in index.js
*/
function Zlib (mode) {
- if (mode == null || mode < exports.DEFLATE || mode > exports.UNZIP) {
+ if (typeof mode !== 'number' || mode < exports.DEFLATE || mode > exports.UNZIP) {
throw new TypeError('Bad argument')
}
- this.chunk_size = 0
this.dictionary = null
this.err = 0
this.flush = 0
@@ -45,6 +44,7 @@ function Zlib (mode) {
this.windowBits = 0
this.write_in_progress = false
this.pending_close = false
+ this.gzip_id_bytes_read = 0
}
Zlib.prototype.close = function () {
@@ -60,15 +60,14 @@ Zlib.prototype.close = function () {
if (this.mode === exports.DEFLATE || this.mode === exports.GZIP || this.mode === exports.DEFLATERAW) {
zlib_deflate.deflateEnd(this.strm)
- } else {
+ } else if (this.mode === exports.INFLATE || this.mode === exports.GUNZIP ||
+ this.mode === exports.INFLATERAW || this.mode === exports.UNZIP) {
zlib_inflate.inflateEnd(this.strm)
}
this.mode = exports.NONE
- if (this.dictionary != null) {
- this.dictionary = null
- }
+ this.dictionary = null
}
Zlib.prototype.write = function (flush, input, in_off, in_len, out, out_off, out_len) {
@@ -108,12 +107,6 @@ Zlib.prototype._write = function (async, flush, input, in_off, in_len, out, out_
in_off = 0
}
- if (out._set) {
- out.set = out._set
- } else {
- out.set = bufferSet
- }
-
this.strm.avail_in = in_len
this.strm.input = input
this.strm.next_in = in_off
@@ -122,8 +115,6 @@ Zlib.prototype._write = function (async, flush, input, in_off, in_len, out, out_
this.strm.next_out = out_off
this.flush = flush
- this.chunk_size = out_len
-
if (!async) {
// sync version
this._process()
@@ -154,6 +145,8 @@ Zlib.prototype._afterSync = function () {
}
Zlib.prototype._process = function () {
+ var next_expected_header_byte = null
+
// If the avail_out is left at 0, then it means that it ran out
// of room. If there was avail_out left over, then it means
// that all of the input was consumed.
@@ -164,6 +157,50 @@ Zlib.prototype._process = function () {
this.err = zlib_deflate.deflate(this.strm, this.flush)
break
case exports.UNZIP:
+ if (this.strm.avail_in > 0) {
+ next_expected_header_byte = this.strm.next_in
+ }
+
+ switch (this.gzip_id_bytes_read) {
+ case 0:
+ if (next_expected_header_byte === null) {
+ break
+ }
+
+ if (this.strm.input[next_expected_header_byte] === GZIP_HEADER_ID1) {
+ this.gzip_id_bytes_read = 1
+ next_expected_header_byte++
+
+ if (this.strm.avail_in === 1) {
+ // The only available byte was already read.
+ break
+ }
+ } else {
+ this.mode = exports.INFLATE
+ break
+ }
+
+ // fallthrough
+ case 1:
+ if (next_expected_header_byte === null) {
+ break
+ }
+
+ if (this.strm.input[next_expected_header_byte] === GZIP_HEADER_ID2) {
+ this.gzip_id_bytes_read = 2
+ this.mode = exports.GUNZIP
+ } else {
+ // There is no actual difference between INFLATE and INFLATERAW
+ // (after initialization).
+ this.mode = exports.INFLATE
+ }
+
+ break
+ default:
+ throw new Error('invalid number of gzip magic number bytes read')
+ }
+
+ // fallthrough
case exports.INFLATE:
case exports.GUNZIP:
case exports.INFLATERAW:
@@ -183,16 +220,15 @@ Zlib.prototype._process = function () {
this.err = exports.Z_NEED_DICT
}
}
- while (this.strm.avail_in >= GZIP_MIN_HEADER_SIZE &&
- this.mode === exports.GUNZIP) {
+ while (this.strm.avail_in > 0 &&
+ this.mode === exports.GUNZIP &&
+ this.err === exports.Z_STREAM_END &&
+ this.strm.next_in[0] !== 0x00) {
// Bytes remain in input buffer. Perhaps this is another compressed
// member in the same archive, or just trailing garbage.
- // Check the header to find out.
- if (this.strm.next_in[0] !== GZIP_HEADER_ID1 ||
- this.strm.next_in[1] !== GZIP_HEADER_ID2) {
- // Not a valid gzip member
- break
- }
+ // Trailing zero bytes are okay, though, since they are frequently
+ // used for padding.
+
this.reset()
this.err = zlib_inflate.inflate(this.strm, this.flush)
}
@@ -250,6 +286,9 @@ Zlib.prototype._after = function () {
}
Zlib.prototype._error = function (message) {
+ if (this.strm.msg) {
+ message = this.strm.msg
+ }
this.onerror(message, this.err)
// no hope of rescue.
@@ -262,7 +301,7 @@ Zlib.prototype._error = function (message) {
Zlib.prototype.init = function (windowBits, level, memLevel, strategy, dictionary) {
assert(arguments.length === 4 || arguments.length === 5, 'init(windowBits, level, memLevel, strategy, [dictionary])')
- assert(windowBits >= 6 && windowBits <= 15, 'invalid windowBits')
+ assert(windowBits >= 8 && windowBits <= 15, 'invalid windowBits')
assert(level >= -1 && level <= 9, 'invalid compression level')
assert(memLevel >= 1 && memLevel <= 9, 'invalid memlevel')
@@ -390,11 +429,4 @@ Zlib.prototype._reset = function () {
}
}
-// set method for Node buffers, used by pako
-function bufferSet (data, offset) {
- for (var i = 0; i < data.length; i++) {
- this[offset + i] = data[i]
- }
-}
-
exports.Zlib = Zlib
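The UNZIP changes above auto-detect the input format from the first two bytes: if they match the gzip magic values 0x1f 0x8b the mode becomes GUNZIP, otherwise INFLATE. A minimal sketch of that decision (not part of the diff), simplified to assume both header bytes arrive in a single write, whereas the actual code tracks gzip_id_bytes_read so the bytes may arrive across separate writes:

    // Sketch only: choose the decompression mode the way the UNZIP case does.
    function detectUnzipMode (firstBytes) {
      var GZIP_HEADER_ID1 = 0x1f
      var GZIP_HEADER_ID2 = 0x8b
      if (firstBytes.length >= 2 &&
          firstBytes[0] === GZIP_HEADER_ID1 &&
          firstBytes[1] === GZIP_HEADER_ID2) {
        return 'GUNZIP' // gzip stream; further concatenated members may follow
      }
      return 'INFLATE' // plain zlib/deflate data
    }

Once in GUNZIP mode, the new loop in _process calls inflate again whenever non-zero input remains after Z_STREAM_END, so concatenated gzip members are decompressed in sequence and trailing zero bytes are treated as padding.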
diff --git a/test/build.js b/test/build.js
new file mode 100644
index 0000000..94e4ff5
--- /dev/null
+++ b/test/build.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const browserify = require('browserify')
+const glob = require('glob')
+const path = require('path')
+const fs = require('fs')
+
+function wrap (content, files) {
+ return `
+ (function () {
+ var fs = {}
+ var require
+ var emitOnexit // set in common.js
+
+ var timeouts = {}
+ var timeoutId = 0
+ var setTimeout = function (fn, time) {
+ if (time) throw new Error('time not supported in fake setTimeout')
+ timeouts[++timeoutId] = fn
+ return timeoutId
+ }
+ var clearTimeout = function (id) {
+ delete timeouts[id]
+ }
+ function doTimeouts () {
+ while (Object.keys(timeouts).length) {
+ var id = Object.keys(timeouts)[0]
+ timeouts[id]()
+ delete timeouts[id]
+ }
+ emitOnexit()
+ }
+
+ ;${content}
+
+ describe('zlib-browserify', function () {
+ ${files.map(file => `
+ it('${path.basename(file, '.js')}', function () {
+ require(${JSON.stringify(path.normalize(file))})
+ doTimeouts()
+ })`
+ ).join('')}
+ })
+ })();
+ `
+}
+
+const browserified = path.join(__dirname, 'tmp/browserified.js')
+
+glob(path.join(__dirname, 'test-*'), (err, files) => {
+ if (err) throw err
+
+ // workaround for old assert version in browserify
+ require('browserify/lib/builtins').assert = require.resolve('assert/')
+
+ const b = browserify({
+ transform: ['babelify', 'brfs']
+ })
+
+ b.require(files)
+ b.bundle((err, buf) => {
+ if (err) throw err
+
+ fs.writeFileSync(browserified, wrap(buf, files))
+ console.log('bundled')
+ })
+})
diff --git a/test/common.js b/test/common.js
index 20742d5..40a0250 100644
--- a/test/common.js
+++ b/test/common.js
@@ -1,12 +1,30 @@
'use strict'
+/* global emitOnexit:true */ // eslint-disable-line no-unused-vars
var path = require('path')
var fs = require('fs')
+var stream = require('stream')
+var EventEmitter = require('events')
exports.fixturesDir = path.join(__dirname, 'fixtures')
var mustCallChecks = []
+if (process.on === process.emit) {
+ let emitter
+ const reset = () => {
+ mustCallChecks = []
+ emitter = new EventEmitter()
+ process.on = emitter.on.bind(emitter)
+ process.once = emitter.once.bind(emitter)
+ }
+ emitOnexit = () => {
+ emitter.emit('exit')
+ reset()
+ }
+ reset()
+}
+
function runCallChecks (exitCode) {
if (exitCode !== 0) return
@@ -99,3 +117,50 @@ exports.refreshTmpDir = function () {
rimrafSync(exports.tmpDir)
fs.mkdirSync(exports.tmpDir)
}
+
+const rFS = 'readFileSync' // this stops the brfs static analyzer
+if (!fs[rFS]) {
+ // this is to make it work with brfs
+ const files = {
+ 'elipses.txt': fs.readFileSync(path.resolve('test/fixtures', 'elipses.txt')),
+ // there is a strange issue like https://github.com/nodejs/node-v0.x-archive/issues/7914,
+ // even though that should be fixed.
+ 'empty.txt': Buffer.alloc(0),
+ // 'empty.txt': fs.readFileSync(path.resolve('test/fixtures', 'empty.txt')),
+ 'person.jpg': fs.readFileSync(path.resolve('test/fixtures', 'person.jpg')),
+ 'person.jpg.gz': fs.readFileSync(path.resolve('test/fixtures', 'person.jpg.gz')),
+ 'pseudo-multimember-gzip.gz': fs.readFileSync(path.resolve('test/fixtures', 'pseudo-multimember-gzip.gz')),
+ 'pseudo-multimember-gzip.z': fs.readFileSync(path.resolve('test/fixtures', 'pseudo-multimember-gzip.z'))
+ }
+ Object.keys(files).forEach(file => {
+ files[path.resolve(exports.fixturesDir, file)] = files[file]
+ })
+
+ fs[rFS] = name => {
+ if (!files[name]) throw new Error(`file "${name}" not found`)
+ return files[name]
+ }
+
+ const cRS = 'createReadStream'
+ fs[cRS] = name => {
+ const s = new stream.Readable()
+ s.push(fs.readFileSync(name))
+ s.push(null)
+ return s
+ }
+
+ fs.createWriteStream = name => {
+ const s = new stream.Writable()
+ const chunks = []
+ s._write = (chunk, encoding, callback) => {
+ chunks.push(chunk)
+ callback()
+ }
+ s.on('finish', () => {
+ files[name] = Buffer.concat(chunks)
+ })
+ return s
+ }
+
+ exports.refreshTmpDir = () => {}
+}
diff --git a/test/disabled-test-zlib-flush.js b/test/disabled-test-zlib-flush.js
new file mode 100644
index 0000000..f86c8bb
--- /dev/null
+++ b/test/disabled-test-zlib-flush.js
@@ -0,0 +1,36 @@
+'use strict';
+var common = require('./common');
+var assert = require('assert');
+var zlib = require('../');
+var path = require('path');
+var fs = require('fs');
+
+const file = fs.readFileSync(path.resolve(common.fixturesDir, 'person.jpg'));
+const chunkSize = 16;
+const opts = { level: 0 };
+const deflater = zlib.createDeflate(opts);
+
+const chunk = file.slice(0, chunkSize);
+const expectedNone = Buffer.from([0x78, 0x01]);
+const blkhdr = Buffer.from([0x00, 0x10, 0x00, 0xef, 0xff]);
+const adler32 = Buffer.from([0x00, 0x00, 0x00, 0xff, 0xff]);
+const expectedFull = Buffer.concat([blkhdr, chunk, adler32]);
+let actualNone;
+let actualFull;
+
+deflater.write(chunk, function() {
+ deflater.flush(zlib.Z_NO_FLUSH, function() {
+ actualNone = deflater.read();
+ deflater.flush(function() {
+ var bufs = [], buf;
+ while (buf = deflater.read())
+ bufs.push(buf);
+ actualFull = Buffer.concat(bufs);
+ });
+ });
+});
+
+process.once('exit', function() {
+ assert.deepStrictEqual(actualNone, expectedNone);
+ assert.deepStrictEqual(actualFull, expectedFull);
+});
diff --git a/test/disabled-test-zlib-from-gzip-with-trailing-garbage.js b/test/disabled-test-zlib-from-gzip-with-trailing-garbage.js
new file mode 100644
index 0000000..e1a5998
--- /dev/null
+++ b/test/disabled-test-zlib-from-gzip-with-trailing-garbage.js
@@ -0,0 +1,49 @@
+'use strict';
+// test unzipping a gzip file that has trailing garbage
+
+const common = require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+// should ignore trailing null-bytes
+let data = Buffer.concat([
+ zlib.gzipSync('abc'),
+ zlib.gzipSync('def'),
+ Buffer(10).fill(0)
+]);
+
+assert.equal(zlib.gunzipSync(data).toString(), 'abcdef');
+
+zlib.gunzip(data, common.mustCall((err, result) => {
+ assert.ifError(err);
+ assert.equal(result, 'abcdef', 'result should match original string');
+}));
+
+// if the trailing garbage happens to look like a gzip header, it should
+// throw an error.
+data = Buffer.concat([
+ zlib.gzipSync('abc'),
+ zlib.gzipSync('def'),
+ Buffer([0x1f, 0x8b, 0xff, 0xff]),
+ Buffer(10).fill(0)
+]);
+
+assert.throws(() => zlib.gunzipSync(data));
+
+zlib.gunzip(data, common.mustCall((err, result) => {
+ assert(err);
+}));
+
+// In this case the trailing junk is too short to be a gzip segment
+// So we ignore it and decompression succeeds.
+data = Buffer.concat([
+ zlib.gzipSync('abc'),
+ zlib.gzipSync('def'),
+ Buffer([0x1f, 0x8b, 0xff, 0xff])
+]);
+
+assert.throws(() => zlib.gunzipSync(data));
+
+zlib.gunzip(data, common.mustCall((err, result) => {
+ assert(err);
+}));
diff --git a/test/disabled-test-zlib-params.js b/test/disabled-test-zlib-params.js
new file mode 100644
index 0000000..606a5d3
--- /dev/null
+++ b/test/disabled-test-zlib-params.js
@@ -0,0 +1,34 @@
+'use strict';
+var common = require('./common');
+var assert = require('assert');
+var zlib = require('../');
+var path = require('path');
+var fs = require('fs');
+
+const file = fs.readFileSync(path.resolve(common.fixturesDir, 'person.jpg'));
+const chunkSize = 12 * 1024;
+const opts = { level: 9, strategy: zlib.Z_DEFAULT_STRATEGY };
+const deflater = zlib.createDeflate(opts);
+
+const chunk1 = file.slice(0, chunkSize);
+const chunk2 = file.slice(chunkSize);
+const blkhdr = Buffer.from([0x00, 0x5a, 0x82, 0xa5, 0x7d]);
+const expected = Buffer.concat([blkhdr, chunk2]);
+let actual;
+
+deflater.write(chunk1, function() {
+ deflater.params(0, zlib.Z_DEFAULT_STRATEGY, function() {
+ while (deflater.read());
+ deflater.end(chunk2, function() {
+ var bufs = [], buf;
+ while (buf = deflater.read())
+ bufs.push(buf);
+ actual = Buffer.concat(bufs);
+ });
+ });
+ while (deflater.read());
+});
+
+process.once('exit', function() {
+ assert.deepStrictEqual(actual, expected);
+});
diff --git a/test/fixtures/empty.txt b/test/fixtures/empty.txt
index 8b13789..e69de29 100644
--- a/test/fixtures/empty.txt
+++ b/test/fixtures/empty.txt
@@ -1 +0,0 @@
-
diff --git a/test/fixtures/pseudo-multimember-gzip.gz b/test/fixtures/pseudo-multimember-gzip.gz
new file mode 100644
index 0000000..a019c48
Binary files /dev/null and b/test/fixtures/pseudo-multimember-gzip.gz differ
diff --git a/test/fixtures/pseudo-multimember-gzip.z b/test/fixtures/pseudo-multimember-gzip.z
new file mode 100644
index 0000000..e87b13a
Binary files /dev/null and b/test/fixtures/pseudo-multimember-gzip.z differ
diff --git a/test/test-zlib-close-after-error.js b/test/test-zlib-close-after-error.js
new file mode 100644
index 0000000..3a11826
--- /dev/null
+++ b/test/test-zlib-close-after-error.js
@@ -0,0 +1,16 @@
+'use strict';
+// https://github.com/nodejs/node/issues/6034
+
+const common = require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+const decompress = zlib.createGunzip(15);
+
+decompress.on('error', common.mustCall((err) => {
+ assert.strictEqual(decompress._closed, true);
+ assert.doesNotThrow(() => decompress.close());
+}));
+
+assert.strictEqual(decompress._closed, false);
+decompress.write('something invalid');
diff --git a/test/test-zlib-close-after-write.js b/test/test-zlib-close-after-write.js
index a03d369..967c099 100755
--- a/test/test-zlib-close-after-write.js
+++ b/test/test-zlib-close-after-write.js
@@ -1,17 +1,9 @@
-/* eslint-env mocha */
-'use strict'
+'use strict';
+const common = require('./common');
+var zlib = require('../');
-var zlib = require('../')
-
-describe('zlib - close after write', function () {
- it('works', function (done) {
- zlib.gzip('hello', function (err, out) {
- if (err) throw err
- var unzip = zlib.createGunzip()
- unzip.write(out)
- unzip.close(function () {
- done()
- })
- })
- })
-})
+zlib.gzip('hello', common.mustCall(function(err, out) {
+ var unzip = zlib.createGunzip();
+ unzip.write(out);
+ unzip.close(common.mustCall(function() {}));
+}));
diff --git a/test/test-zlib-const.js b/test/test-zlib-const.js
index ae18642..56ebf89 100644
--- a/test/test-zlib-const.js
+++ b/test/test-zlib-const.js
@@ -1,17 +1,17 @@
/* eslint-disable strict */
+require('./common');
+var assert = require('assert');
-var assert = require('assert')
+var zlib = require('../');
-var zlib = require('../')
+assert.equal(zlib.Z_OK, 0, 'Z_OK should be 0');
+zlib.Z_OK = 1;
+assert.equal(zlib.Z_OK, 0, 'Z_OK should be 0');
-assert.equal(zlib.Z_OK, 0, 'Z_OK should be 0')
-zlib.Z_OK = 1
-assert.equal(zlib.Z_OK, 0, 'Z_OK should be 0')
+assert.equal(zlib.codes.Z_OK, 0, 'Z_OK should be 0');
+zlib.codes.Z_OK = 1;
+assert.equal(zlib.codes.Z_OK, 0, 'zlib.codes.Z_OK should be 0');
+zlib.codes = {Z_OK: 1};
+assert.equal(zlib.codes.Z_OK, 0, 'zlib.codes.Z_OK should be 0');
-assert.equal(zlib.codes.Z_OK, 0, 'Z_OK should be 0')
-zlib.codes.Z_OK = 1
-assert.equal(zlib.codes.Z_OK, 0, 'zlib.codes.Z_OK should be 0')
-zlib.codes = {Z_OK: 1}
-assert.equal(zlib.codes.Z_OK, 0, 'zlib.codes.Z_OK should be 0')
-
-assert.ok(Object.isFrozen(zlib.codes), 'zlib.codes should be frozen')
+assert.ok(Object.isFrozen(zlib.codes), 'zlib.codes should be frozen');
diff --git a/test/test-zlib-convenience-methods.js b/test/test-zlib-convenience-methods.js
index dc229a9..5ffa4ec 100755
--- a/test/test-zlib-convenience-methods.js
+++ b/test/test-zlib-convenience-methods.js
@@ -1,71 +1,59 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// test convenience methods with and without options supplied
-var assert = require('assert')
-var zlib = require('../')
-var hadRun = 0
+require('./common');
+var assert = require('assert');
+var zlib = require('../');
+
+var hadRun = 0;
-var expect = 'blahblahblahblahblahblah'
+var expect = 'blahblahblahblahblahblah';
var opts = {
level: 9,
- chunkSize: 1024
-}
-
-describe('zlib - convenience methods', function () {
- [
- ['gzip', 'gunzip'],
- ['gzip', 'unzip'],
- ['deflate', 'inflate'],
- ['deflateRaw', 'inflateRaw']
- ].forEach(function (method) {
- it(method.join(':'), function (done) {
- var finish = function () {
- hadRun++
- if (hadRun === 4) {
- hadRun = 0
- done()
- }
- }
-
- zlib[method[0]](expect, opts, function (err, result) {
- if (err) throw err
-
- zlib[method[1]](result, opts, function (err, result) {
- if (err) throw err
-
- assert.equal(result, expect,
- 'Should get original string after ' +
- method[0] + '/' + method[1] + ' with options.')
- finish()
- })
- })
-
- zlib[method[0]](expect, function (err, result) {
- if (err) throw err
- zlib[method[1]](result, function (err, result) {
- if (err) throw err
- assert.equal(result, expect,
- 'Should get original string after ' +
- method[0] + '/' + method[1] + ' without options.')
- finish()
- })
- })
-
- var result = zlib[method[0] + 'Sync'](expect, opts)
- result = zlib[method[1] + 'Sync'](result, opts)
+ chunkSize: 1024,
+};
+
+[
+ ['gzip', 'gunzip'],
+ ['gzip', 'unzip'],
+ ['deflate', 'inflate'],
+ ['deflateRaw', 'inflateRaw'],
+].forEach(function(method) {
+
+ zlib[method[0]](expect, opts, function(err, result) {
+ zlib[method[1]](result, opts, function(err, result) {
assert.equal(result, expect,
- 'Should get original string after ' +
- method[0] + '/' + method[1] + ' with options.')
- finish()
-
- result = zlib[method[0] + 'Sync'](expect)
- result = zlib[method[1] + 'Sync'](result)
+ 'Should get original string after ' +
+ method[0] + '/' + method[1] + ' with options.');
+ hadRun++;
+ });
+ });
+
+ zlib[method[0]](expect, function(err, result) {
+ zlib[method[1]](result, function(err, result) {
assert.equal(result, expect,
- 'Should get original string after ' +
- method[0] + '/' + method[1] + ' without options.')
- finish()
- })
- })
-})
+ 'Should get original string after ' +
+ method[0] + '/' + method[1] + ' without options.');
+ hadRun++;
+ });
+ });
+
+ var result = zlib[method[0] + 'Sync'](expect, opts);
+ result = zlib[method[1] + 'Sync'](result, opts);
+ assert.equal(result, expect,
+ 'Should get original string after ' +
+ method[0] + '/' + method[1] + ' with options.');
+ hadRun++;
+
+ result = zlib[method[0] + 'Sync'](expect);
+ result = zlib[method[1] + 'Sync'](result);
+ assert.equal(result, expect,
+ 'Should get original string after ' +
+ method[0] + '/' + method[1] + ' without options.');
+ hadRun++;
+
+});
+
+process.on('exit', function() {
+ assert.equal(hadRun, 16, 'expect 16 compressions');
+});
diff --git a/test/test-zlib-dictionary-fail.js b/test/test-zlib-dictionary-fail.js
index 77e28b2..782e63b 100644
--- a/test/test-zlib-dictionary-fail.js
+++ b/test/test-zlib-dictionary-fail.js
@@ -1,34 +1,28 @@
-/* eslint-env mocha */
-'use strict'
+'use strict';
+var common = require('./common');
+var assert = require('assert');
+var zlib = require('../');
-var assert = require('assert')
-var common = require('./common')
-var zlib = require('../')
+// Should raise an error, not trigger an assertion in src/node_zlib.cc
+{
+ const stream = zlib.createInflate();
-describe('zlib - dictionary fails', function () {
- it('should fail on missing dictionary', function (done) {
- // Should raise an error, not trigger an assertion in src/node_zlib.cc
- var stream = zlib.createInflate()
+ stream.on('error', common.mustCall(function(err) {
+ assert(/Missing dictionary/.test(err.message));
+ }));
- stream.on('error', common.mustCall(function (err) {
- assert(/Missing dictionary/.test(err.message))
- done()
- }))
+ // String "test" encoded with dictionary "dict".
+ stream.write(Buffer.from([0x78, 0xBB, 0x04, 0x09, 0x01, 0xA5]));
+}
- // String "test" encoded with dictionary "dict".
- stream.write(Buffer([0x78, 0xBB, 0x04, 0x09, 0x01, 0xA5]))
- })
+// Should raise an error, not trigger an assertion in src/node_zlib.cc
+{
+ const stream = zlib.createInflate({ dictionary: Buffer.from('fail') });
- it('should fail on a bad dictionary', function (done) {
- // Should raise an error, not trigger an assertion in src/node_zlib.cc
- var stream = zlib.createInflate({ dictionary: Buffer('fail') })
+ stream.on('error', common.mustCall(function(err) {
+ assert(/Bad dictionary/.test(err.message));
+ }));
- stream.on('error', common.mustCall(function (err) {
- assert(/Bad dictionary/.test(err.message))
- done()
- }))
-
- // String "test" encoded with dictionary "dict".
- stream.write(Buffer([0x78, 0xBB, 0x04, 0x09, 0x01, 0xA5]))
- })
-})
+ // String "test" encoded with dictionary "dict".
+ stream.write(Buffer.from([0x78, 0xBB, 0x04, 0x09, 0x01, 0xA5]));
+}
diff --git a/test/test-zlib-dictionary.js b/test/test-zlib-dictionary.js
index 30db4f2..0a62897 100644
--- a/test/test-zlib-dictionary.js
+++ b/test/test-zlib-dictionary.js
@@ -1,11 +1,11 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// test compression/decompression with dictionary
-var assert = require('assert')
-var zlib = require('../')
-var spdyDict = new Buffer([
+require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+const spdyDict = Buffer.from([
'optionsgetheadpostputdeletetraceacceptaccept-charsetaccept-encodingaccept-',
'languageauthorizationexpectfromhostif-modified-sinceif-matchif-none-matchi',
'f-rangeif-unmodifiedsincemax-forwardsproxy-authorizationrangerefererteuser',
@@ -19,73 +19,71 @@ var spdyDict = new Buffer([
'pOctNovDecchunkedtext/htmlimage/pngimage/jpgimage/gifapplication/xmlapplic',
'ation/xhtmltext/plainpublicmax-agecharset=iso-8859-1utf-8gzipdeflateHTTP/1',
'.1statusversionurl\0'
-].join(''))
+].join(''));
-var input = [
+const input = [
'HTTP/1.1 200 Ok',
'Server: node.js',
'Content-Length: 0',
''
-].join('\r\n')
-
-describe('zlib - dictionary', function () {
- it('basic dictionary', function (done) {
- var output = ''
- var deflate = zlib.createDeflate({ dictionary: spdyDict })
- var inflate = zlib.createInflate({ dictionary: spdyDict })
-
- deflate.on('data', function (chunk) {
- inflate.write(chunk)
- })
-
- inflate.on('data', function (chunk) {
- output += chunk
- })
-
- deflate.on('end', function () {
- inflate.end()
- })
-
- inflate.on('end', function () {
- assert.equal(input, output)
- done()
- })
-
- deflate.write(input)
- deflate.end()
- })
-
- it('deflate reset dictionary', function (done) {
- var doneReset = false
- var output = ''
- var deflate = zlib.createDeflate({ dictionary: spdyDict })
- var inflate = zlib.createInflate({ dictionary: spdyDict })
-
- deflate.on('data', function (chunk) {
- if (doneReset) {
- inflate.write(chunk)
- }
- })
-
- inflate.on('data', function (chunk) {
- output += chunk
- })
-
- deflate.on('end', function () {
- inflate.end()
- })
-
- inflate.on('end', function () {
- assert.equal(input, output)
- done()
- })
-
- deflate.write(input)
- deflate.flush(function () {
- deflate.reset()
- doneReset = true
- deflate.write(input)
- deflate.end()
- })
- })
-})
+].join('\r\n');
+
+function basicDictionaryTest() {
+ let output = '';
+ const deflate = zlib.createDeflate({ dictionary: spdyDict });
+ const inflate = zlib.createInflate({ dictionary: spdyDict });
+
+ deflate.on('data', function(chunk) {
+ inflate.write(chunk);
+ });
+
+ inflate.on('data', function(chunk) {
+ output += chunk;
+ });
+
+ deflate.on('end', function() {
+ inflate.end();
+ });
+
+ inflate.on('end', function() {
+ assert.equal(input, output);
+ });
+
+ deflate.write(input);
+ deflate.end();
+}
+
+function deflateResetDictionaryTest() {
+ let doneReset = false;
+ let output = '';
+ const deflate = zlib.createDeflate({ dictionary: spdyDict });
+ const inflate = zlib.createInflate({ dictionary: spdyDict });
+
+ deflate.on('data', function(chunk) {
+ if (doneReset)
+ inflate.write(chunk);
+ });
+
+ inflate.on('data', function(chunk) {
+ output += chunk;
+ });
+
+ deflate.on('end', function() {
+ inflate.end();
+ });
+
+ inflate.on('end', function() {
+ assert.equal(input, output);
+ });
+
+ deflate.write(input);
+ deflate.flush(function() {
+ deflate.reset();
+ doneReset = true;
+ deflate.write(input);
+ deflate.end();
+ });
+}
+
+basicDictionaryTest();
+deflateResetDictionaryTest();
diff --git a/test/test-zlib-flush-drain.js b/test/test-zlib-flush-drain.js
index dc2d717..a474fc9 100644
--- a/test/test-zlib-flush-drain.js
+++ b/test/test-zlib-flush-drain.js
@@ -1,55 +1,48 @@
-/* eslint-env mocha */
-'use strict'
+'use strict';
+require('./common');
+const assert = require('assert');
+const zlib = require('../');
-var assert = require('assert')
-var zlib = require('../')
+const bigData = Buffer.alloc(10240, 'x');
-var bigData = new Buffer(10240).fill('x')
-
-var opts = {
+const opts = {
level: 0,
highWaterMark: 16
-}
+};
-var deflater = zlib.createDeflate(opts)
+const deflater = zlib.createDeflate(opts);
// shim deflater.flush so we can count times executed
-var flushCount = 0
-var drainCount = 0
-
-describe('zlib - flush drain', function () {
- it('works', function (done) {
- var flush = deflater.flush
- deflater.flush = function (kind, callback) {
- flushCount++
- flush.call(this, kind, callback)
- }
-
- deflater.write(bigData)
-
- var ws = deflater._writableState
- var beforeFlush = ws.needDrain
- var afterFlush = ws.needDrain
-
- deflater.flush(function (err) {
- if (err) throw err
- afterFlush = ws.needDrain
-
- assert.equal(afterFlush, false,
- 'after calling flush the writable stream should not need to drain')
- assert.equal(drainCount, 1,
- 'the deflater should have emitted a single drain event')
- assert.equal(flushCount, 2,
- 'flush should be called twice')
-
- done()
- })
-
- deflater.on('drain', function () {
- drainCount++
- })
-
- assert.equal(beforeFlush, true,
- 'before calling flush the writable stream should need to drain')
- })
-})
+var flushCount = 0;
+var drainCount = 0;
+
+const flush = deflater.flush;
+deflater.flush = function(kind, callback) {
+ flushCount++;
+ flush.call(this, kind, callback);
+};
+
+deflater.write(bigData);
+
+const ws = deflater._writableState;
+const beforeFlush = ws.needDrain;
+var afterFlush = ws.needDrain;
+
+deflater.flush(function(err) {
+ afterFlush = ws.needDrain;
+});
+
+deflater.on('drain', function() {
+ drainCount++;
+});
+
+process.once('exit', function() {
+ assert.equal(beforeFlush, true,
+ 'before calling flush, writable stream should need to drain');
+ assert.equal(afterFlush, false,
+ 'after calling flush, writable stream should not need to drain');
+ assert.equal(drainCount, 1,
+ 'the deflater should have emitted a single drain event');
+ assert.equal(flushCount, 2,
+ 'flush should be called twice');
+});
diff --git a/test/test-zlib-flush-flags.js b/test/test-zlib-flush-flags.js
new file mode 100644
index 0000000..1a35b73
--- /dev/null
+++ b/test/test-zlib-flush-flags.js
@@ -0,0 +1,28 @@
+'use strict';
+require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+assert.doesNotThrow(() => {
+ zlib.createGzip({ flush: zlib.Z_SYNC_FLUSH });
+});
+
+assert.throws(() => {
+ zlib.createGzip({ flush: 'foobar' });
+}, /Invalid flush flag: foobar/);
+
+assert.throws(() => {
+ zlib.createGzip({ flush: 10000 });
+}, /Invalid flush flag: 10000/);
+
+assert.doesNotThrow(() => {
+ zlib.createGzip({ finishFlush: zlib.Z_SYNC_FLUSH });
+});
+
+assert.throws(() => {
+ zlib.createGzip({ finishFlush: 'foobar' });
+}, /Invalid flush flag: foobar/);
+
+assert.throws(() => {
+ zlib.createGzip({ finishFlush: 10000 });
+}, /Invalid flush flag: 10000/);
diff --git a/test/test-zlib-flush.js b/test/test-zlib-flush.js
deleted file mode 100644
index 277eb42..0000000
--- a/test/test-zlib-flush.js
+++ /dev/null
@@ -1,42 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-var assert = require('assert')
-var zlib = require('../')
-var path = require('path')
-var fs = require('fs')
-
-describe.skip('zlib - flush', function () {
- it('works', function (done) {
- var file = fs.readFileSync(path.join(__dirname, 'fixtures', 'person.jpg'))
- var chunkSize = 16
- var opts = { level: 0 }
- var deflater = zlib.createDeflate(opts)
-
- var chunk = file.slice(0, chunkSize)
- var expectedNone = new Buffer([0x78, 0x01])
- var blkhdr = new Buffer([0x00, 0x10, 0x00, 0xef, 0xff])
- var adler32 = new Buffer([0x00, 0x00, 0x00, 0xff, 0xff])
- var expectedFull = Buffer.concat([blkhdr, chunk, adler32])
- var actualNone
- var actualFull
-
- deflater.write(chunk, function () {
- deflater.flush(zlib.Z_NO_FLUSH, function () {
- actualNone = deflater.read()
- deflater.flush(function () {
- var bufs = []
- var buf
- // eslint-disable-next-line
- while (buf = deflater.read()) {}
- bufs.push(buf)
- actualFull = Buffer.concat(bufs)
- assert.deepEqual(actualNone, expectedNone)
- assert.deepEqual(actualFull, expectedFull)
-
- done()
- })
- })
- })
- })
-})
diff --git a/test/test-zlib-from-concatenated-gzip.js b/test/test-zlib-from-concatenated-gzip.js
index 7007928..75a0448 100644
--- a/test/test-zlib-from-concatenated-gzip.js
+++ b/test/test-zlib-from-concatenated-gzip.js
@@ -1,24 +1,80 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// Test unzipping a gzip file that contains multiple concatenated "members"
-var common = require('./common')
-var assert = require('assert')
-var zlib = require('../')
-
-describe.skip('zlib - from concatenated gzip', function () {
- it('works', function (done) {
- var data = Buffer.concat([
- zlib.gzipSync('abc'),
- zlib.gzipSync('def')
- ])
-
- assert.equal(zlib.gunzipSync(data).toString(), 'abcdef')
-
- zlib.gunzip(data, common.mustCall(function (err, result) {
- assert.ifError(err)
- assert.equal(result, 'abcdef', 'result should match original string')
- done()
- }))
+
+const common = require('./common');
+const assert = require('assert');
+const zlib = require('../');
+const path = require('path');
+const fs = require('fs');
+
+const abcEncoded = zlib.gzipSync('abc');
+const defEncoded = zlib.gzipSync('def');
+
+const data = Buffer.concat([
+ abcEncoded,
+ defEncoded
+]);
+
+assert.equal(zlib.gunzipSync(data).toString(), 'abcdef');
+
+zlib.gunzip(data, common.mustCall((err, result) => {
+ assert.ifError(err);
+ assert.equal(result, 'abcdef', 'result should match original string');
+}));
+
+zlib.unzip(data, common.mustCall((err, result) => {
+ assert.ifError(err);
+ assert.equal(result, 'abcdef', 'result should match original string');
+}));
+
+// Multi-member support does not apply to zlib inflate/deflate.
+zlib.unzip(Buffer.concat([
+ zlib.deflateSync('abc'),
+ zlib.deflateSync('def')
+]), common.mustCall((err, result) => {
+ assert.ifError(err);
+ assert.equal(result, 'abc', 'result should match contents of first "member"');
+}));
+
+// files that have the "right" magic bytes for starting a new gzip member
+// in the middle of themselves, even if they are part of a single
+// regularly compressed member
+const pmmFileZlib = path.join(common.fixturesDir, 'pseudo-multimember-gzip.z');
+const pmmFileGz = path.join(common.fixturesDir, 'pseudo-multimember-gzip.gz');
+
+const pmmExpected = zlib.inflateSync(fs.readFileSync(pmmFileZlib));
+const pmmResultBuffers = [];
+
+fs.createReadStream(pmmFileGz)
+ .pipe(zlib.createGunzip())
+ .on('error', (err) => {
+ assert.ifError(err);
})
-})
+ .on('data', (data) => pmmResultBuffers.push(data))
+ .on('finish', common.mustCall(() => {
+ assert.deepStrictEqual(Buffer.concat(pmmResultBuffers), pmmExpected,
+ 'result should match original random garbage');
+ }));
+
+// test that the next gzip member can wrap around the input buffer boundary
+[0, 1, 2, 3, 4, defEncoded.length].forEach((offset) => {
+ const resultBuffers = [];
+
+ const unzip = zlib.createGunzip()
+ .on('error', (err) => {
+ assert.ifError(err);
+ })
+ .on('data', (data) => resultBuffers.push(data))
+ .on('finish', common.mustCall(() => {
+ assert.strictEqual(Buffer.concat(resultBuffers).toString(), 'abcdef',
+ `result should match original input (offset = ${offset})`);
+ }));
+
+ // first write: write "abc" + the first bytes of "def"
+ unzip.write(Buffer.concat([
+ abcEncoded, defEncoded.slice(0, offset)
+ ]));
+
+ // write remaining bytes of "def"
+ unzip.end(defEncoded.slice(offset));
+});
diff --git a/test/test-zlib-from-gzip-with-trailing-garbage.js b/test/test-zlib-from-gzip-with-trailing-garbage.js
deleted file mode 100644
index ff2b965..0000000
--- a/test/test-zlib-from-gzip-with-trailing-garbage.js
+++ /dev/null
@@ -1,59 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-// test unzipping a gzip file that has trailing garbage
-var common = require('./common')
-var assert = require('assert')
-var zlib = require('../')
-
-describe.skip('zlib - from gzip with trailing garbage', function () {
- it('should ignore trailing null-bytes', function (done) {
- var data = Buffer.concat([
- zlib.gzipSync('abc'),
- zlib.gzipSync('def'),
- Buffer(10).fill(0)
- ])
-
- assert.equal(zlib.gunzipSync(data).toString(), 'abcdef')
-
- zlib.gunzip(data, common.mustCall(function (err, result) {
- assert.ifError(err)
- assert.equal(result, 'abcdef', 'result should match original string')
- done()
- }))
- })
-
- it('should throw on gzip header garbage', function (done) {
- var data = Buffer.concat([
- zlib.gzipSync('abc'),
- zlib.gzipSync('def'),
- Buffer([0x1f, 0x8b, 0xff, 0xff]),
- Buffer(10).fill(0)
- ])
-
- assert.throws(function () {
- zlib.gunzipSync(data)
- })
-
- zlib.gunzip(data, common.mustCall(function (err, result) {
- assert(err)
- done()
- }))
- })
-
- it('should throw on junk that is too short', function (done) {
- var data = Buffer.concat([
- zlib.gzipSync('abc'),
- zlib.gzipSync('def'),
- Buffer([0x1f, 0x8b, 0xff, 0xff])
- ])
-
- assert.equal(zlib.gunzipSync(data).toString(), 'abcdef')
-
- zlib.gunzip(data, common.mustCall(function (err, result) {
- assert.ifError(err)
- assert.equal(result, 'abcdef', 'result should match original string')
- done()
- }))
- })
-})
diff --git a/test/test-zlib-from-gzip.js b/test/test-zlib-from-gzip.js
index e68021e..a2794d7 100644
--- a/test/test-zlib-from-gzip.js
+++ b/test/test-zlib-from-gzip.js
@@ -1,39 +1,30 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// test unzipping a file that was created with a non-node gzip lib,
// piped in as fast as possible.
-var common = require('./common')
-var assert = require('assert')
-var zlib = require('../')
-var path = require('path')
-var fs = require('fs')
-describe('zlib - from gzip', function () {
- it('works', function (done) {
- if (global.window) {
- return done()
- }
+var common = require('./common');
+var assert = require('assert');
+var zlib = require('../');
+var path = require('path');
+
+common.refreshTmpDir();
- common.refreshTmpDir()
+var gunzip = zlib.createGunzip();
- var gunzip = zlib.createGunzip()
+var fs = require('fs');
- var fixture = path.resolve(common.fixturesDir, 'person.jpg.gz')
- var unzippedFixture = path.resolve(common.fixturesDir, 'person.jpg')
- var outputFile = path.resolve(common.tmpDir, 'person.jpg')
- var expect = fs.readFileSync(unzippedFixture)
- var inp = fs.createReadStream(fixture)
- var out = fs.createWriteStream(outputFile)
+var fixture = path.resolve(common.fixturesDir, 'person.jpg.gz');
+var unzippedFixture = path.resolve(common.fixturesDir, 'person.jpg');
+var outputFile = path.resolve(common.tmpDir, 'person.jpg');
+var expect = fs.readFileSync(unzippedFixture);
+var inp = fs.createReadStream(fixture);
+var out = fs.createWriteStream(outputFile);
- inp.pipe(gunzip).pipe(out)
- out.on('close', function () {
- var actual = fs.readFileSync(outputFile)
- assert.equal(actual.length, expect.length, 'length should match')
- for (var i = 0, l = actual.length; i < l; i++) {
- assert.equal(actual[i], expect[i], 'byte[' + i + ']')
- }
- done()
- })
- })
-})
+inp.pipe(gunzip).pipe(out);
+out.on('close', function() {
+ var actual = fs.readFileSync(outputFile);
+ assert.equal(actual.length, expect.length, 'length should match');
+ for (var i = 0, l = actual.length; i < l; i++) {
+ assert.equal(actual[i], expect[i], 'byte[' + i + ']');
+ }
+});
diff --git a/test/test-zlib-from-string.js b/test/test-zlib-from-string.js
index ff51a2f..b287840 100755
--- a/test/test-zlib-from-string.js
+++ b/test/test-zlib-from-string.js
@@ -1,78 +1,63 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// test compressing and uncompressing a string with zlib
-var assert = require('assert')
-var zlib = require('../')
-var inputString = 'ΩΩLorem ipsum dolor sit amet, consectetur adipiscing el' +
- 'it. Morbi faucibus, purus at gravida dictum, libero arcu convallis la' +
- 'cus, in commodo libero metus eu nisi. Nullam commodo, neque nec porta' +
- ' placerat, nisi est fermentum augue, vitae gravida tellus sapien sit ' +
- 'amet tellus. Aenean non diam orci. Proin quis elit turpis. Suspendiss' +
- 'e non diam ipsum. Suspendisse nec ullamcorper odio. Vestibulum arcu m' +
- 'i, sodales non suscipit id, ultrices ut massa. Sed ac sem sit amet ar' +
- 'cu malesuada fermentum. Nunc sed. '
-var expectedBase64Deflate = 'eJxdUUtOQzEMvMoc4OndgT0gJCT2buJWlpI4jePeqZfpm' +
- 'XAKLRKbLOzx/HK73q6vOrhCunlF1qIDJhNUeW5I2ozT5OkDlKWLJWkncJG5403HQXAkT3' +
- 'Jw29B9uIEmToMukglZ0vS6ociBh4JG8sV4oVLEUCitK2kxq1WzPnChHDzsaGKy491Lofo' +
- 'AbWh8do43oeuYhB5EPCjcLjzYJo48KrfQBvnJecNFJvHT1+RSQsGoC7dn2t/xjhduTA1N' +
- 'WyQIZR0pbHwMDatnD+crPqKSqGPHp1vnlsWM/07ubf7bheF7kqSj84Bm0R1fYTfaK8vqq' +
- 'qfKBtNMhe3OZh6N95CTvMX5HJJi4xOVzCgUOIMSLH7wmeOHaFE4RdpnGavKtrB5xzfO/Ll9'
-var expectedBase64Gzip = 'H4sIAAAAAAAAA11RS05DMQy8yhzg6d2BPSAkJPZu4laWkjiN' +
- '496pl+mZcAotEpss7PH8crverq86uEK6eUXWogMmE1R5bkjajNPk6QOUpYslaSdwkbnjT' +
- 'cdBcCRPcnDb0H24gSZOgy6SCVnS9LqhyIGHgkbyxXihUsRQKK0raTGrVbM+cKEcPOxoYr' +
- 'Lj3Uuh+gBtaHx2jjeh65iEHkQ8KNwuPNgmjjwqt9AG+cl5w0Um8dPX5FJCwagLt2fa3/G' +
- 'OF25MDU1bJAhlHSlsfAwNq2cP5ys+opKoY8enW+eWxYz/Tu5t/tuF4XuSpKPzgGbRHV9h' +
- 'N9ory+qqp8oG00yF7c5mHo33kJO8xfkckmLjE5XMKBQ4gxIsfvCZ44doUThF2mcZq8q2s' +
- 'HnHNzRtagj5AQAA'
+require('./common');
+var assert = require('assert');
+var zlib = require('../');
-describe('zlib - from string', function () {
- it('deflate', function (done) {
- zlib.deflate(inputString, function (err, buffer) {
- if (err) throw err
- assert.equal(buffer.toString('base64'), expectedBase64Deflate,
- 'deflate encoded string should match')
- done()
- })
- })
+var inputString = 'ΩΩLorem ipsum dolor sit amet, consectetur adipiscing elit.' +
+ ' Morbi faucibus, purus at gravida dictum, libero arcu conv' +
+ 'allis lacus, in commodo libero metus eu nisi. Nullam commo' +
+ 'do, neque nec porta placerat, nisi est fermentum augue, vi' +
+ 'tae gravida tellus sapien sit amet tellus. Aenean non diam' +
+ ' orci. Proin quis elit turpis. Suspendisse non diam ipsum.' +
+ ' Suspendisse nec ullamcorper odio. Vestibulum arcu mi, sod' +
+ 'ales non suscipit id, ultrices ut massa. Sed ac sem sit am' +
+ 'et arcu malesuada fermentum. Nunc sed. ';
+var expectedBase64Deflate = 'eJxdUUtOQzEMvMoc4OndgT0gJCT2buJWlpI4jePeqZfpmXAK' +
+ 'LRKbLOzx/HK73q6vOrhCunlF1qIDJhNUeW5I2ozT5OkDlKWL' +
+ 'JWkncJG5403HQXAkT3Jw29B9uIEmToMukglZ0vS6ociBh4JG' +
+ '8sV4oVLEUCitK2kxq1WzPnChHDzsaGKy491LofoAbWh8do43' +
+ 'oeuYhB5EPCjcLjzYJo48KrfQBvnJecNFJvHT1+RSQsGoC7dn' +
+ '2t/xjhduTA1NWyQIZR0pbHwMDatnD+crPqKSqGPHp1vnlsWM' +
+ '/07ubf7bheF7kqSj84Bm0R1fYTfaK8vqqqfKBtNMhe3OZh6N' +
+ '95CTvMX5HJJi4xOVzCgUOIMSLH7wmeOHaFE4RdpnGavKtrB5' +
+ 'xzfO/Ll9';
+var expectedBase64Gzip = 'H4sIAAAAAAAAA11RS05DMQy8yhzg6d2BPSAkJPZu4laWkjiN496' +
+ 'pl+mZcAotEpss7PH8crverq86uEK6eUXWogMmE1R5bkjajNPk6Q' +
+ 'OUpYslaSdwkbnjTcdBcCRPcnDb0H24gSZOgy6SCVnS9LqhyIGHg' +
+ 'kbyxXihUsRQKK0raTGrVbM+cKEcPOxoYrLj3Uuh+gBtaHx2jjeh' +
+ '65iEHkQ8KNwuPNgmjjwqt9AG+cl5w0Um8dPX5FJCwagLt2fa3/G' +
+ 'OF25MDU1bJAhlHSlsfAwNq2cP5ys+opKoY8enW+eWxYz/Tu5t/t' +
+ 'uF4XuSpKPzgGbRHV9hN9ory+qqp8oG00yF7c5mHo33kJO8xfkck' +
+ 'mLjE5XMKBQ4gxIsfvCZ44doUThF2mcZq8q2sHnHNzRtagj5AQAA';
- it('gzip', function (done) {
- zlib.gzip(inputString, function (err, buffer) {
- if (err) throw err
+zlib.deflate(inputString, function(err, buffer) {
+ assert.equal(buffer.toString('base64'), expectedBase64Deflate,
+ 'deflate encoded string should match');
+});
- // Can't actually guarantee that we'll get exactly the same
- // deflated bytes when we compress a string, since the header
- // depends on stuff other than the input string itself.
- // However, decrypting it should definitely yield the same
- // result that we're expecting, and this should match what we get
- // from inflating the known valid deflate data.
- zlib.gunzip(buffer, function (err, gunzipped) {
- if (err) throw err
- assert.equal(gunzipped.toString(), inputString,
- 'Should get original string after gzip/gunzip')
- done()
- })
- })
- })
+zlib.gzip(inputString, function(err, buffer) {
+ // Can't actually guarantee that we'll get exactly the same
+ // deflated bytes when we compress a string, since the header
+ // depends on stuff other than the input string itself.
+ // However, decrypting it should definitely yield the same
+ // result that we're expecting, and this should match what we get
+ // from inflating the known valid deflate data.
+ zlib.gunzip(buffer, function(err, gunzipped) {
+ assert.equal(gunzipped.toString(), inputString,
+ 'Should get original string after gzip/gunzip');
+ });
+});
- it('unzip inflated', function (done) {
- var buffer = new Buffer(expectedBase64Deflate, 'base64')
- zlib.unzip(buffer, function (err, buffer) {
- if (err) throw err
- assert.equal(buffer.toString(), inputString,
- 'decoded inflated string should match')
- done()
- })
- })
+var buffer = Buffer.from(expectedBase64Deflate, 'base64');
+zlib.unzip(buffer, function(err, buffer) {
+ assert.equal(buffer.toString(), inputString,
+ 'decoded inflated string should match');
+});
- it('unzip gunzipped', function (done) {
- var buffer = new Buffer(expectedBase64Gzip, 'base64')
- zlib.unzip(buffer, function (err, buffer) {
- if (err) throw err
- assert.equal(buffer.toString(), inputString,
- 'decoded gunzipped string should match')
- done()
- })
- })
-})
+buffer = Buffer.from(expectedBase64Gzip, 'base64');
+zlib.unzip(buffer, function(err, buffer) {
+ assert.equal(buffer.toString(), inputString,
+ 'decoded gunzipped string should match');
+});
diff --git a/test/test-zlib-invalid-input.js b/test/test-zlib-invalid-input.js
index e1979f4..34b8e2a 100755
--- a/test/test-zlib-invalid-input.js
+++ b/test/test-zlib-invalid-input.js
@@ -1,61 +1,45 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// test uncompressing invalid input
-var assert = require('assert')
-var zlib = require('../')
-var nonStringInputs = [1, true, {a: 1}, ['a']]
+require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+var nonStringInputs = [1, true, {a: 1}, ['a']];
-describe('zlib - invalid input', function () {
- it('non strings', function (done) {
- var i = 0
- var finish = function () {
- i++
- if (i === 3) {
- done()
- }
- }
- nonStringInputs.forEach(function (input) {
- // zlib.gunzip should not throw an error when called with bad input.
- assert.doesNotThrow(function () {
- zlib.gunzip(input, function (err, buffer) {
- // zlib.gunzip should pass the error to the callback.
- assert.ok(err)
- finish()
- })
- })
- })
- })
+console.error('Doing the non-strings');
+nonStringInputs.forEach(function(input) {
+ // zlib.gunzip should not throw an error when called with bad input.
+ assert.doesNotThrow(function() {
+ zlib.gunzip(input, function(err, buffer) {
+ // zlib.gunzip should pass the error to the callback.
+ assert.ok(err);
+ });
+ });
+});
- it('unzips', function (done) {
- // zlib.Unzip classes need to get valid data, or else they'll throw.
- var unzips = [
- zlib.Unzip(),
- zlib.Gunzip(),
- zlib.Inflate(),
- zlib.InflateRaw()
- ]
- var hadError = []
+console.error('Doing the unzips');
+// zlib.Unzip classes need to get valid data, or else they'll throw.
+var unzips = [ zlib.Unzip(),
+ zlib.Gunzip(),
+ zlib.Inflate(),
+ zlib.InflateRaw() ];
+var hadError = [];
+unzips.forEach(function(uz, i) {
+ console.error('Error for ' + uz.constructor.name);
+ uz.on('error', function(er) {
+ console.error('Error event', er);
+ hadError[i] = true;
+ });
- var finish = function (i) {
- hadError[i] = true
- if (hadError.length === 4) {
- assert.deepEqual(hadError, [true, true, true, true], 'expect 4 errors')
- done()
- }
- }
- unzips.forEach(function (uz, i) {
- uz.on('error', function (er) {
- finish(i)
- })
+ uz.on('end', function(er) {
+ throw new Error('end event should not be emitted ' + uz.constructor.name);
+ });
- uz.on('end', function (er) {
- throw new Error('end event should not be emitted ' + uz.constructor.name)
- })
+ // this will trigger error event
+ uz.write('this is not valid compressed data.');
+});
- // this will trigger error event
- uz.write('this is not valid compressed data.')
- })
- })
-})
+process.on('exit', function() {
+ assert.deepStrictEqual(hadError, [true, true, true, true], 'expect 4 errors');
+});
diff --git a/test/test-zlib-params.js b/test/test-zlib-params.js
deleted file mode 100644
index 953aedd..0000000
--- a/test/test-zlib-params.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-var assert = require('assert')
-var zlib = require('../')
-var path = require('path')
-var fs = require('fs')
-
-var file = fs.readFileSync(path.join(__dirname, 'fixtures', 'person.jpg'))
-var chunkSize = 12 * 1024
-var opts = { level: 9, strategy: zlib.Z_DEFAULT_STRATEGY }
-var deflater = zlib.createDeflate(opts)
-
-var chunk1 = file.slice(0, chunkSize)
-var chunk2 = file.slice(chunkSize)
-var blkhdr = new Buffer([0x00, 0x5a, 0x82, 0xa5, 0x7d])
-var expected = Buffer.concat([blkhdr, chunk2])
-var actual
-
-describe.skip('zlib - params', function () {
- it('works', function (done) {
- deflater.write(chunk1, function () {
- deflater.params(0, zlib.Z_DEFAULT_STRATEGY, function () {
- while (deflater.read()) {}
- deflater.end(chunk2, function () {
- var bufs = []
- var buf
- // eslint-disable-next-line
- while (buf = deflater.read()) {
- bufs.push(buf)
- }
- actual = Buffer.concat(bufs)
- assert.deepEqual(actual, expected)
- done()
- })
- })
- while (deflater.read()) {}
- })
- })
-})
diff --git a/test/test-zlib-random-byte-pipes.js b/test/test-zlib-random-byte-pipes.js
index f77c8b0..b6e7b4b 100755
--- a/test/test-zlib-random-byte-pipes.js
+++ b/test/test-zlib-random-byte-pipes.js
@@ -1,162 +1,158 @@
-/* eslint-env mocha */
-'use strict'
+'use strict';
+var common = require('./common');
+var assert = require('assert');
-var assert = require('assert')
-var crypto = require('crypto')
+// if (!common.hasCrypto) {
+// common.skip('missing crypto');
+// return;
+// }
+var crypto = require('crypto');
+
+var stream = require('stream');
+var Stream = stream.Stream;
+var util = require('util');
+var zlib = require('../');
-var stream = require('stream')
-var Stream = stream.Stream
-var util = require('util')
-var zlib = require('../')
// emit random bytes, and keep a shasum
-function RandomReadStream (opt) {
- Stream.call(this)
+function RandomReadStream(opt) {
+ Stream.call(this);
- this.readable = true
- this._paused = false
- this._processing = false
+ this.readable = true;
+ this._paused = false;
+ this._processing = false;
- this._hasher = crypto.createHash('sha1')
- opt = opt || {}
+ this._hasher = crypto.createHash('sha1');
+ opt = opt || {};
// base block size.
- opt.block = opt.block || 256 * 1024
+ opt.block = opt.block || 256 * 1024;
// total number of bytes to emit
- opt.total = opt.total || 256 * 1024 * 1024
- this._remaining = opt.total
+ opt.total = opt.total || 256 * 1024 * 1024;
+ this._remaining = opt.total;
// how variable to make the block sizes
- opt.jitter = opt.jitter || 1024
+ opt.jitter = opt.jitter || 1024;
- this._opt = opt
+ this._opt = opt;
- this._process = this._process.bind(this)
+ this._process = this._process.bind(this);
- process.nextTick(this._process)
+ process.nextTick(this._process);
}
-util.inherits(RandomReadStream, Stream)
+util.inherits(RandomReadStream, Stream);
-RandomReadStream.prototype.pause = function () {
- this._paused = true
- this.emit('pause')
-}
+RandomReadStream.prototype.pause = function() {
+ this._paused = true;
+ this.emit('pause');
+};
-RandomReadStream.prototype.resume = function () {
- // console.error("rrs resume")
- this._paused = false
- this.emit('resume')
- this._process()
-}
+RandomReadStream.prototype.resume = function() {
+ // console.error("rrs resume");
+ this._paused = false;
+ this.emit('resume');
+ this._process();
+};
-RandomReadStream.prototype._process = function () {
- if (this._processing) return
- if (this._paused) return
+RandomReadStream.prototype._process = function() {
+ if (this._processing) return;
+ if (this._paused) return;
- this._processing = true
+ this._processing = true;
if (!this._remaining) {
- this._hash = this._hasher.digest('hex').toLowerCase().trim()
- this._processing = false
+ this._hash = this._hasher.digest('hex').toLowerCase().trim();
+ this._processing = false;
- this.emit('end')
- return
+ this.emit('end');
+ return;
}
// figure out how many bytes to output
// if finished, then just emit end.
- var block = this._opt.block
- var jitter = this._opt.jitter
+ var block = this._opt.block;
+ var jitter = this._opt.jitter;
if (jitter) {
- block += Math.ceil(Math.random() * jitter - (jitter / 2))
+ block += Math.ceil(Math.random() * jitter - (jitter / 2));
}
- block = Math.min(block, this._remaining)
- var buf = new Buffer(block)
+ block = Math.min(block, this._remaining);
+ var buf = Buffer.allocUnsafe(block);
for (var i = 0; i < block; i++) {
- buf[i] = Math.random() * 256
+ buf[i] = Math.random() * 256;
}
- this._hasher.update(buf)
+ this._hasher.update(buf);
- this._remaining -= block
+ this._remaining -= block;
- // console.error('block=%d\nremain=%d\n', block, this._remaining)
- this._processing = false
+ console.error('block=%d\nremain=%d\n', block, this._remaining);
+ this._processing = false;
+
+ this.emit('data', buf);
+ process.nextTick(this._process);
+};
- this.emit('data', buf)
- process.nextTick(this._process)
-}
// a filter that just verifies a shasum
-function HashStream () {
- Stream.call(this)
+function HashStream() {
+ Stream.call(this);
- this.readable = this.writable = true
- this._hasher = crypto.createHash('sha1')
+ this.readable = this.writable = true;
+ this._hasher = crypto.createHash('sha1');
}
-util.inherits(HashStream, Stream)
+util.inherits(HashStream, Stream);
-HashStream.prototype.write = function (c) {
+HashStream.prototype.write = function(c) {
// Simulate the way that an fs.ReadStream returns false
// on *every* write like a jerk, only to resume a
// moment later.
- this._hasher.update(c)
- process.nextTick(this.resume.bind(this))
- return false
-}
+ this._hasher.update(c);
+ process.nextTick(this.resume.bind(this));
+ return false;
+};
-HashStream.prototype.resume = function () {
- this.emit('resume')
- process.nextTick(this.emit.bind(this, 'drain'))
-}
+HashStream.prototype.resume = function() {
+ this.emit('resume');
+ process.nextTick(this.emit.bind(this, 'drain'));
+};
-HashStream.prototype.end = function (c) {
+HashStream.prototype.end = function(c) {
if (c) {
- this.write(c)
+ this.write(c);
}
- this._hash = this._hasher.digest('hex').toLowerCase().trim()
- this.emit('data', this._hash)
- this.emit('end')
-}
+ this._hash = this._hasher.digest('hex').toLowerCase().trim();
+ this.emit('data', this._hash);
+ this.emit('end');
+};
+
+
+var inp = new RandomReadStream({ total: 1024, block: 256, jitter: 16 });
+var out = new HashStream();
+var gzip = zlib.createGzip();
+var gunz = zlib.createGunzip();
+
+inp.pipe(gzip).pipe(gunz).pipe(out);
+
+inp.on('data', function(c) {
+ console.error('inp data', c.length);
+});
+
+gzip.on('data', function(c) {
+ console.error('gzip data', c.length);
+});
+
+gunz.on('data', function(c) {
+ console.error('gunz data', c.length);
+});
+
+out.on('data', function(c) {
+ console.error('out data', c.length);
+});
-describe('zlib - random byte pipes', function () {
- it('works', function (done) {
- var inp = new RandomReadStream({ total: 1024, block: 256, jitter: 16 })
- var out = new HashStream()
- var gzip = zlib.createGzip()
- var gunz = zlib.createGunzip()
-
- inp.pipe(gzip).pipe(gunz).pipe(out)
-
- // inp.on('data', function (c) {
- // console.error('inp data', c.length)
- // })
-
- // gzip.on('data', function (c) {
- // console.error('gzip data', c.length)
- // })
-
- // gunz.on('data', function (c) {
- // console.error('gunz data', c.length)
- // })
-
- // out.on('data', function (c) {
- // console.error('out data', c.length)
- // })
-
- var didSomething = false
- out.on('data', function (c) {
- didSomething = true
- // console.error('hash=%s', c)
- assert.equal(c, inp._hash, 'hashes should match')
- })
-
- out.on('end', function () {
- assert(didSomething, 'should have done something')
- done()
- })
- })
-})
+out.on('data', common.mustCall(function(c) {
+ console.error('hash=%s', c);
+ assert.equal(c, inp._hash, 'hashes should match');
+}));
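
For context, the rewritten test above drops the mocha describe/it wrapper and leans on common.mustCall from the new test/common.js to guarantee that the final 'data' handler really fires. That helper is not shown in this hunk; the following is only a rough sketch of how such a wrapper typically works (the names and the exit-time check are assumptions, not the shipped test/common.js):

    // mustCall sketch: wrap a callback and verify at process exit that it ran
    const assert = require('assert');

    function mustCall(fn, expected) {
      const want = expected === undefined ? 1 : expected;
      let calls = 0;
      process.on('exit', function() {
        assert.strictEqual(calls, want,
          'mustCall: expected ' + want + ' call(s), got ' + calls);
      });
      return function() {
        calls++;
        return fn.apply(this, arguments);
      };
    }

    module.exports = { mustCall: mustCall };
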
diff --git a/test/test-zlib-sync-no-event.js b/test/test-zlib-sync-no-event.js
new file mode 100644
index 0000000..7a08f5c
--- /dev/null
+++ b/test/test-zlib-sync-no-event.js
@@ -0,0 +1,21 @@
+'use strict';
+require('./common');
+const zlib = require('../');
+const assert = require('assert');
+
+const shouldNotBeCalled = () => { throw new Error('unexpected event'); };
+
+const message = 'Come on, Fhqwhgads.';
+
+const zipper = new zlib.Gzip();
+zipper.on('close', shouldNotBeCalled);
+
+const buffer = new Buffer(message);
+const zipped = zipper._processChunk(buffer, zlib.Z_FINISH);
+
+const unzipper = new zlib.Gunzip();
+unzipper.on('close', shouldNotBeCalled);
+
+const unzipped = unzipper._processChunk(zipped, zlib.Z_FINISH);
+assert.notEqual(zipped.toString(), message);
+assert.strictEqual(unzipped.toString(), message);
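
The new test-zlib-sync-no-event.js goes through the internal _processChunk() path on purpose, since the point is that the synchronous path must not emit 'close'. As a mental model of what the assertions cover, the same round trip expressed with the public synchronous API looks roughly like this (a sketch against the core zlib module, which this package aims to mirror; it is not what the test itself runs):

    const assert = require('assert');
    const zlib = require('zlib');

    const message = 'Come on, Fhqwhgads.';

    const zipped = zlib.gzipSync(message);          // compressed bytes differ from the input...
    assert.notStrictEqual(zipped.toString(), message);

    const unzipped = zlib.gunzipSync(zipped);       // ...and decompressing restores it exactly
    assert.strictEqual(unzipped.toString(), message);
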
diff --git a/test/test-zlib-truncated.js b/test/test-zlib-truncated.js
index 43ba020..08eb8fb 100644
--- a/test/test-zlib-truncated.js
+++ b/test/test-zlib-truncated.js
@@ -1,55 +1,67 @@
-/* eslint-env mocha */
-'use strict'
-
+'use strict';
// tests zlib streams with truncated compressed input
-var assert = require('assert')
-var zlib = require('../')
-
-var inputString = 'ΩΩLorem ipsum dolor sit amet, consectetur adipiscing el' +
- 'it. Morbi faucibus, purus at gravida dictum, libero arcu convallis la' +
- 'cus, in commodo libero metus eu nisi. Nullam commodo, neque nec porta' +
- ' placerat, nisi est fermentum augue, vitae gravida tellus sapien sit ' +
- 'amet tellus. Aenean non diam orci. Proin quis elit turpis. Suspendiss' +
- 'e non diam ipsum. Suspendisse nec ullamcorper odio. Vestibulum arcu m' +
- 'i, sodales non suscipit id, ultrices ut massa. Sed ac sem sit amet ar' +
- 'cu malesuada fermentum. Nunc sed. '
-
-describe('zlib - truncated', function () {
- [
- { comp: 'gzip', decomp: 'gunzip', decompSync: 'gunzipSync' },
- { comp: 'gzip', decomp: 'unzip', decompSync: 'unzipSync' },
- { comp: 'deflate', decomp: 'inflate', decompSync: 'inflateSync' },
- { comp: 'deflateRaw', decomp: 'inflateRaw', decompSync: 'inflateRawSync' }
- ].forEach(function (methods) {
- it(methods.comp, function (done) {
- zlib[methods.comp](inputString, function (err, compressed) {
- assert(!err)
- var truncated = compressed.slice(0, compressed.length / 2)
-
- // sync sanity
- assert.doesNotThrow(function () {
- var decompressed = zlib[methods.decompSync](compressed)
- assert.equal(decompressed, inputString)
- })
-
- // async sanity
- zlib[methods.decomp](compressed, function (err, result) {
- assert.ifError(err)
- assert.equal(result, inputString)
- })
-
- // sync truncated input test
- assert.throws(function () {
- zlib[methods.decompSync](truncated)
- }, /unexpected end of file/)
-
- // async truncated input test
- zlib[methods.decomp](truncated, function (err, result) {
- assert(/unexpected end of file/.test(err.message))
- })
-
- done()
- })
- })
- })
-})
+
+require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+const inputString = 'ΩΩLorem ipsum dolor sit amet, consectetur adipiscing eli' +
+ 't. Morbi faucibus, purus at gravida dictum, libero arcu ' +
+ 'convallis lacus, in commodo libero metus eu nisi. Nullam' +
+ ' commodo, neque nec porta placerat, nisi est fermentum a' +
+ 'ugue, vitae gravida tellus sapien sit amet tellus. Aenea' +
+ 'n non diam orci. Proin quis elit turpis. Suspendisse non' +
+ ' diam ipsum. Suspendisse nec ullamcorper odio. Vestibulu' +
+ 'm arcu mi, sodales non suscipit id, ultrices ut massa. S' +
+ 'ed ac sem sit amet arcu malesuada fermentum. Nunc sed. ';
+
+[
+ { comp: 'gzip', decomp: 'gunzip', decompSync: 'gunzipSync' },
+ { comp: 'gzip', decomp: 'unzip', decompSync: 'unzipSync' },
+ { comp: 'deflate', decomp: 'inflate', decompSync: 'inflateSync' },
+ { comp: 'deflateRaw', decomp: 'inflateRaw', decompSync: 'inflateRawSync' }
+].forEach(function(methods) {
+ zlib[methods.comp](inputString, function(err, compressed) {
+ assert(!err);
+ const truncated = compressed.slice(0, compressed.length / 2);
+
+ // sync sanity
+ assert.doesNotThrow(function() {
+ const decompressed = zlib[methods.decompSync](compressed);
+ assert.equal(decompressed, inputString);
+ });
+
+ // async sanity
+ zlib[methods.decomp](compressed, function(err, result) {
+ assert.ifError(err);
+ assert.equal(result, inputString);
+ });
+
+ // sync truncated input test
+ assert.throws(function() {
+ zlib[methods.decompSync](truncated);
+ }, /unexpected end of file/);
+
+ // async truncated input test
+ zlib[methods.decomp](truncated, function(err, result) {
+ assert(/unexpected end of file/.test(err.message));
+ });
+
+ const syncFlushOpt = { finishFlush: zlib.Z_SYNC_FLUSH };
+
+ // sync truncated input test, finishFlush = Z_SYNC_FLUSH
+ assert.doesNotThrow(function() {
+ const result = zlib[methods.decompSync](truncated, syncFlushOpt)
+ .toString();
+ assert.equal(result, inputString.substr(0, result.length));
+ });
+
+ // async truncated input test, finishFlush = Z_SYNC_FLUSH
+ zlib[methods.decomp](truncated, syncFlushOpt, function(err, decompressed) {
+ assert.ifError(err);
+
+ const result = decompressed.toString();
+ assert.equal(result, inputString.substr(0, result.length));
+ });
+ });
+});
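
The substantive addition to test-zlib-truncated.js is the finishFlush: Z_SYNC_FLUSH option: with the default Z_FINISH, decompressing a cut-off stream fails with "unexpected end of file", while Z_SYNC_FLUSH returns whatever prefix could be decoded. A minimal standalone illustration (sketch against the core zlib module; the input string and cut point are arbitrary):

    const zlib = require('zlib');

    const full = zlib.deflateSync('hello world, hello world, hello world');
    const truncated = full.slice(0, full.length >> 1);

    // zlib.inflateSync(truncated) would throw: unexpected end of file.
    // With finishFlush set to Z_SYNC_FLUSH the recoverable prefix comes back instead.
    const partial = zlib.inflateSync(truncated, { finishFlush: zlib.Z_SYNC_FLUSH });
    console.log(partial.toString());  // some prefix of the original (possibly empty for tiny inputs)
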
diff --git a/test/test-zlib-unzip-one-byte-chunks.js b/test/test-zlib-unzip-one-byte-chunks.js
new file mode 100644
index 0000000..22f36e0
--- /dev/null
+++ b/test/test-zlib-unzip-one-byte-chunks.js
@@ -0,0 +1,28 @@
+'use strict';
+const common = require('./common');
+const assert = require('assert');
+const zlib = require('../');
+
+const data = Buffer.concat([
+ zlib.gzipSync('abc'),
+ zlib.gzipSync('def')
+]);
+
+const resultBuffers = [];
+
+const unzip = zlib.createUnzip()
+ .on('error', (err) => {
+ assert.ifError(err);
+ })
+ .on('data', (data) => resultBuffers.push(data))
+ .on('finish', common.mustCall(() => {
+ assert.deepStrictEqual(Buffer.concat(resultBuffers).toString(), 'abcdef',
+ 'result should match original string');
+ }));
+
+for (let i = 0; i < data.length; i++) {
+ // Write each single byte individually.
+ unzip.write(Buffer.from([data[i]]));
+}
+
+unzip.end();
diff --git a/test/test-zlib-write-after-close.js b/test/test-zlib-write-after-close.js
index 1fad3de..c14c518 100644
--- a/test/test-zlib-write-after-close.js
+++ b/test/test-zlib-write-after-close.js
@@ -1,20 +1,12 @@
-/* eslint-env mocha */
-'use strict'
+'use strict';
+const common = require('./common');
+var assert = require('assert');
+var zlib = require('../');
-var assert = require('assert')
-var zlib = require('../')
-
-describe('zlib - write after close', function () {
- it('works', function (done) {
- zlib.gzip('hello', function (err, out) {
- if (err) throw err
- var unzip = zlib.createGunzip()
- unzip.close(function () {
- done()
- })
- assert.throws(function () {
- unzip.write(out)
- })
- })
- })
-})
+zlib.gzip('hello', common.mustCall(function(err, out) {
+ var unzip = zlib.createGunzip();
+ unzip.close(common.mustCall(function() {}));
+ assert.throws(function() {
+ unzip.write(out);
+ });
+}));
diff --git a/test/test-zlib-write-after-flush.js b/test/test-zlib-write-after-flush.js
index b3b6f70..2ab74be 100755
--- a/test/test-zlib-write-after-flush.js
+++ b/test/test-zlib-write-after-flush.js
@@ -1,39 +1,33 @@
-/* eslint-env mocha */
-'use strict'
-
-var assert = require('assert')
-var zlib = require('../')
-
-describe('zlib - write after flush', function () {
- it('works', function (done) {
- var gzip = zlib.createGzip()
- var gunz = zlib.createUnzip()
-
- gzip.pipe(gunz)
-
- var output = ''
- var input = 'A line of data\n'
- gunz.setEncoding('utf8')
- gunz.on('data', function (c) {
- output += c
- })
-
- // make sure that flush/write doesn't trigger an assert failure
- gzip.flush()
- write()
-
- gunz.on('end', function () {
- assert.equal(output, input)
-
- // Make sure that the flush flag was set back to normal
- assert.equal(gzip._flushFlag, zlib.Z_NO_FLUSH)
- done()
- })
-
- function write () {
- gzip.write(input)
- gzip.end()
- gunz.read(0)
- }
- })
-})
+'use strict';
+require('./common');
+var assert = require('assert');
+var zlib = require('../');
+
+var gzip = zlib.createGzip();
+var gunz = zlib.createUnzip();
+
+gzip.pipe(gunz);
+
+var output = '';
+var input = 'A line of data\n';
+gunz.setEncoding('utf8');
+gunz.on('data', function(c) {
+ output += c;
+});
+
+process.on('exit', function() {
+ assert.equal(output, input);
+
+ // Make sure that the flush flag was set back to normal
+ assert.equal(gzip._flushFlag, zlib.Z_NO_FLUSH);
+
+ console.log('ok');
+});
+
+// make sure that flush/write doesn't trigger an assert failure
+gzip.flush(); write();
+function write() {
+ gzip.write(input);
+ gzip.end();
+ gunz.read(0);
+}
diff --git a/test/test-zlib-zero-byte.js b/test/test-zlib-zero-byte.js
index 5e6928b..047f669 100755
--- a/test/test-zlib-zero-byte.js
+++ b/test/test-zlib-zero-byte.js
@@ -1,29 +1,18 @@
-/* eslint-env mocha */
-'use strict'
+'use strict';
+const common = require('./common');
+var assert = require('assert');
-var assert = require('assert')
-var zlib = require('../')
+var zlib = require('../');
+var gz = zlib.Gzip();
+var emptyBuffer = Buffer.alloc(0);
+var received = 0;
+gz.on('data', function(c) {
+ received += c.length;
+});
-describe('zlib - zero byte', function () {
- it('works', function (done) {
- var gz = zlib.Gzip()
- var emptyBuffer = new Buffer(0)
- var received = 0
- gz.on('data', function (c) {
- received += c.length
- })
-
- var finished = false
- gz.on('end', function () {
- assert.equal(received, 20)
- assert(finished)
- done()
- })
-
- gz.on('finish', function () {
- finished = true
- })
- gz.write(emptyBuffer)
- gz.end()
- })
-})
+gz.on('end', common.mustCall(function() {
+ assert.strictEqual(received, 20);
+}));
+gz.on('finish', common.mustCall(function() {}));
+gz.write(emptyBuffer);
+gz.end();
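
The 20 asserted in test-zlib-zero-byte.js is not arbitrary: gzipping zero bytes still emits a 10-byte gzip header, a 2-byte deflate block marking an empty final block, and an 8-byte trailer (CRC-32 plus original size), i.e. 20 bytes with default settings. A quick sanity check against the core module (sketch; only the length and the magic bytes are checked because the header's OS and mtime fields can vary):

    const zlib = require('zlib');
    const assert = require('assert');

    const out = zlib.gzipSync(Buffer.alloc(0));
    assert.strictEqual(out.length, 20);   // 10 header + 2 empty deflate block + 8 trailer
    assert.strictEqual(out[0], 0x1f);     // gzip magic
    assert.strictEqual(out[1], 0x8b);
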
diff --git a/test/test-zlib.js b/test/test-zlib.js
index 56d17e7..d8c1548 100644
--- a/test/test-zlib.js
+++ b/test/test-zlib.js
@@ -1,195 +1,207 @@
-/* eslint-env mocha */
-'use strict'
-
-var zlib = require('../')
-var path = require('path')
-
-var zlibPairs = [
- [zlib.Deflate, zlib.Inflate],
- [zlib.Gzip, zlib.Gunzip],
- [zlib.Deflate, zlib.Unzip],
- [zlib.Gzip, zlib.Unzip],
- [zlib.DeflateRaw, zlib.InflateRaw]
-]
+'use strict';
+var common = require('./common');
+var assert = require('assert');
+var zlib = require('../');
+var path = require('path');
+
+var zlibPairs =
+ [[zlib.Deflate, zlib.Inflate],
+ [zlib.Gzip, zlib.Gunzip],
+ [zlib.Deflate, zlib.Unzip],
+ [zlib.Gzip, zlib.Unzip],
+ [zlib.DeflateRaw, zlib.InflateRaw]];
// how fast to trickle through the slowstream
-var trickle = [128, 1024, 1024 * 1024]
+var trickle = [128, 1024, 1024 * 1024];
// tunable options for zlib classes.
// several different chunk sizes
-var chunkSize = [128, 1024, 1024 * 16, 1024 * 1024]
+var chunkSize = [128, 1024, 1024 * 16, 1024 * 1024];
// this is every possible value.
-var level = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-var windowBits = [8, 9, 10, 11, 12, 13, 14, 15]
-var memLevel = [1, 2, 3, 4, 5, 6, 7, 8, 9]
-var strategy = [0, 1, 2, 3, 4]
+var level = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+var windowBits = [8, 9, 10, 11, 12, 13, 14, 15];
+var memLevel = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+var strategy = [0, 1, 2, 3, 4];
// it's nice in theory to test every combination, but it
// takes WAY too long. Maybe a pummel test could do this?
if (!process.env.PUMMEL) {
- trickle = [1024]
- chunkSize = [1024 * 16]
- level = [6]
- memLevel = [8]
- windowBits = [15]
- strategy = [0]
+ trickle = [1024];
+ chunkSize = [1024 * 16];
+ level = [6];
+ memLevel = [8];
+ windowBits = [15];
+ strategy = [0];
}
-var fs = require('fs')
-
-var tests = {
- 'person.jpg': fs.readFileSync(path.join(__dirname, 'fixtures', 'person.jpg')),
- 'elipses.txt': fs.readFileSync(path.join(__dirname, 'fixtures', 'elipses.txt')),
- 'empty.txt': new Buffer('')
-}
+var fs = require('fs');
-var util = require('util')
-var stream = require('stream')
+var testFiles = ['person.jpg', 'elipses.txt', 'empty.txt'];
-// stream that saves everything
-function BufferStream () {
- this.chunks = []
- this.length = 0
- this.writable = true
- this.readable = true
+if (process.env.FAST) {
+ zlibPairs = [[zlib.Gzip, zlib.Unzip]];
+ testFiles = ['person.jpg'];
}
-util.inherits(BufferStream, stream.Stream)
+var tests = {};
+testFiles.forEach(function(file) {
+ tests[file] = fs.readFileSync(path.resolve(common.fixturesDir, file));
+});
-BufferStream.prototype.write = function (c) {
- this.chunks.push(c)
- this.length += c.length
- return true
-}
+var util = require('util');
+var stream = require('stream');
-BufferStream.prototype.end = function (c) {
- if (c) this.write(c)
- // flatten
- var buf = new Buffer(this.length)
- var i = 0
- this.chunks.forEach(function (c) {
- c.copy(buf, i)
- i += c.length
- })
- this.emit('data', buf)
- this.emit('end')
- return true
-}
-function SlowStream (trickle) {
- this.trickle = trickle
- this.offset = 0
- this.readable = this.writable = true
+// stream that saves everything
+function BufferStream() {
+ this.chunks = [];
+ this.length = 0;
+ this.writable = true;
+ this.readable = true;
}
-util.inherits(SlowStream, stream.Stream)
+util.inherits(BufferStream, stream.Stream);
-SlowStream.prototype.write = function () {
- throw new Error('not implemented, just call ss.end(chunk)')
-}
+BufferStream.prototype.write = function(c) {
+ this.chunks.push(c);
+ this.length += c.length;
+ return true;
+};
-SlowStream.prototype.pause = function () {
- this.paused = true
- this.emit('pause')
+BufferStream.prototype.end = function(c) {
+ if (c) this.write(c);
+ // flatten
+ var buf = Buffer.allocUnsafe(this.length);
+ var i = 0;
+ this.chunks.forEach(function(c) {
+ c.copy(buf, i);
+ i += c.length;
+ });
+ this.emit('data', buf);
+ this.emit('end');
+ return true;
+};
+
+
+function SlowStream(trickle) {
+ this.trickle = trickle;
+ this.offset = 0;
+ this.readable = this.writable = true;
}
-SlowStream.prototype.resume = function () {
- var self = this
- if (self.ended) return
- self.emit('resume')
- if (!self.chunk) return
- self.paused = false
- emit()
- function emit () {
- if (self.paused) return
+util.inherits(SlowStream, stream.Stream);
+
+SlowStream.prototype.write = function() {
+ throw new Error('not implemented, just call ss.end(chunk)');
+};
+
+SlowStream.prototype.pause = function() {
+ this.paused = true;
+ this.emit('pause');
+};
+
+SlowStream.prototype.resume = function() {
+ var self = this;
+ if (self.ended) return;
+ self.emit('resume');
+ if (!self.chunk) return;
+ self.paused = false;
+ emit();
+ function emit() {
+ if (self.paused) return;
if (self.offset >= self.length) {
- self.ended = true
- return self.emit('end')
+ self.ended = true;
+ return self.emit('end');
}
- var end = Math.min(self.offset + self.trickle, self.length)
- var c = self.chunk.slice(self.offset, end)
- self.offset += c.length
- self.emit('data', c)
- process.nextTick(emit)
+ var end = Math.min(self.offset + self.trickle, self.length);
+ var c = self.chunk.slice(self.offset, end);
+ self.offset += c.length;
+ self.emit('data', c);
+ process.nextTick(emit);
}
-}
+};
-SlowStream.prototype.end = function (chunk) {
+SlowStream.prototype.end = function(chunk) {
// walk over the chunk in blocks.
- this.chunk = chunk
- this.length = chunk.length
- this.resume()
- return this.ended
-}
+ this.chunk = chunk;
+ this.length = chunk.length;
+ this.resume();
+ return this.ended;
+};
+
// for each of the files, make sure that compressing and
// decompressing results in the same data, for every combination
// of the options set above.
-
-describe('zlib', function () {
- Object.keys(tests).forEach(function (file) {
- var test = tests[file]
- chunkSize.forEach(function (chunkSize) {
- trickle.forEach(function (trickle) {
- windowBits.forEach(function (windowBits) {
- level.forEach(function (level) {
- memLevel.forEach(function (memLevel) {
- strategy.forEach(function (strategy) {
- zlibPairs.forEach(function (pair) {
- var Def = pair[0]
- var Inf = pair[1]
- var opts = { level: level,
- windowBits: windowBits,
- memLevel: memLevel,
- strategy: strategy }
-
- it(Def.name + '.' + Inf.name, function (done) {
- var def = new Def(opts)
- var inf = new Inf(opts)
- var ss = new SlowStream(trickle)
- var buf = new BufferStream()
-
- // verify that the same exact buffer comes out the other end.
- buf.on('data', function (c) {
- var msg = file + ' ' +
+var failures = 0;
+var total = 0;
+var done = 0;
+
+Object.keys(tests).forEach(function(file) {
+ var test = tests[file];
+ chunkSize.forEach(function(chunkSize) {
+ trickle.forEach(function(trickle) {
+ windowBits.forEach(function(windowBits) {
+ level.forEach(function(level) {
+ memLevel.forEach(function(memLevel) {
+ strategy.forEach(function(strategy) {
+ zlibPairs.forEach(function(pair) {
+ var Def = pair[0];
+ var Inf = pair[1];
+ var opts = { level: level,
+ windowBits: windowBits,
+ memLevel: memLevel,
+ strategy: strategy };
+
+ total++;
+
+ var def = new Def(opts);
+ var inf = new Inf(opts);
+ var ss = new SlowStream(trickle);
+ var buf = new BufferStream();
+
+ // verify that the same exact buffer comes out the other end.
+ buf.on('data', function(c) {
+ var msg = file + ' ' +
chunkSize + ' ' +
JSON.stringify(opts) + ' ' +
- Def.name + ' -> ' + Inf.name
- var ok = true
- for (var i = 0; i < Math.max(c.length, test.length); i++) {
- if (c[i] !== test[i]) {
- ok = false
- break
- }
- }
-
- if (ok) {
- done()
- } else {
- var errMsg = [
- 'not ok ' + msg,
- ' ...',
- ' testfile: ' + file,
- ' type: ' + Def.name + ' -> ' + Inf.name,
- ' position: ' + i,
- ' options: ' + JSON.stringify(opts),
- ' expect: ' + test[i],
- ' actual: ' + c[i],
- ' chunkSize: ' + chunkSize,
- ' ---'
- ].join('\n')
-
- throw new Error(errMsg)
- }
- })
-
- // the magic happens here.
- ss.pipe(def).pipe(inf).pipe(buf)
- ss.end(test)
- })
- })
- }) }) }) }) }) }) // sad stallman is sad.
- })
-})
+ Def.name + ' -> ' + Inf.name;
+ var ok = true;
+ var testNum = ++done;
+ for (var i = 0; i < Math.max(c.length, test.length); i++) {
+ if (c[i] !== test[i]) {
+ ok = false;
+ failures++;
+ break;
+ }
+ }
+ if (ok) {
+ console.log('ok ' + (testNum) + ' ' + msg);
+ } else {
+ console.log('not ok ' + (testNum) + ' ' + msg);
+ console.log(' ...');
+ console.log(' testfile: ' + file);
+ console.log(' type: ' + Def.name + ' -> ' + Inf.name);
+ console.log(' position: ' + i);
+ console.log(' options: ' + JSON.stringify(opts));
+ console.log(' expect: ' + test[i]);
+ console.log(' actual: ' + c[i]);
+ console.log(' chunkSize: ' + chunkSize);
+ console.log(' ---');
+ }
+ });
+
+ // the magic happens here.
+ ss.pipe(def).pipe(inf).pipe(buf);
+ ss.end(test);
+ });
+ }); }); }); }); }); }); // sad stallman is sad.
+});
+
+process.on('exit', function(code) {
+ console.log('1..' + done);
+ assert.equal(done, total, (total - done) + ' tests left unfinished');
+ assert.ok(!failures, 'some test failures');
+});
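
test-zlib.js now keeps its own pass/fail counters instead of relying on mocha and runs every combination of fixture, chunk size, trickle rate and zlib option through a compress-then-decompress pipe, checking that the exact input bytes come back. Stripped of the SlowStream/BufferStream plumbing, one iteration boils down to a buffered round trip under a single option set (sketch using the synchronous convenience methods rather than the streaming classes the test drives):

    const zlib = require('zlib');
    const assert = require('assert');

    const input = Buffer.from('some test payload');
    const opts = { level: 6, windowBits: 15, memLevel: 8, strategy: 0 };

    // same options on both sides, mirroring how the test constructs its Def/Inf pairs
    const roundTripped = zlib.inflateSync(zlib.deflateSync(input, opts), opts);
    assert.ok(roundTripped.equals(input), 'round trip should preserve the data');
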
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-browserify-zlib.git