[Pkg-javascript-commits] [node-tar-stream] 01/02: Imported Upstream version 0.4.3

Andrew Kelley andrewrk-guest at moszumanska.debian.org
Mon Jun 30 07:11:58 UTC 2014


This is an automated email from the git hooks/post-receive script.

andrewrk-guest pushed a commit to branch master
in repository node-tar-stream.

commit 061085111407025bc7c17dba4b5ca0f401d7815e
Author: Andrew Kelley <superjoe30 at gmail.com>
Date:   Mon Jun 30 06:47:42 2014 +0000

    Imported Upstream version 0.4.3
---
 .gitignore                    |   2 +
 .travis.yml                   |   6 +
 README.md                     | 119 ++++++++++++
 extract.js                    | 194 +++++++++++++++++++
 headers.js                    | 218 ++++++++++++++++++++++
 index.js                      |   2 +
 pack.js                       | 194 +++++++++++++++++++
 package.json                  |  39 ++++
 test/extract.js               | 420 ++++++++++++++++++++++++++++++++++++++++++
 test/fixtures/index.js        |  10 +
 test/fixtures/invalid.tgz     | Bin 0 -> 59510 bytes
 test/fixtures/long-name.tar   | Bin 0 -> 2048 bytes
 test/fixtures/multi-file.tar  | Bin 0 -> 3072 bytes
 test/fixtures/name-is-100.tar | Bin 0 -> 10240 bytes
 test/fixtures/one-file.tar    | Bin 0 -> 2048 bytes
 test/fixtures/types.tar       | Bin 0 -> 2048 bytes
 test/fixtures/unicode-bsd.tar | Bin 0 -> 10240 bytes
 test/fixtures/unicode.tar     | Bin 0 -> 3072 bytes
 test/pack.js                  | 144 +++++++++++++++
 19 files changed, 1348 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7938f33
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+node_modules
+sandbox.js
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..9672e12
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,6 @@
+language: node_js
+node_js:
+  - "0.8"
+  - "0.10"
+before_install:
+  - npm install -g npm@~1.4.6
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7ae980a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,119 @@
+# tar-stream
+
+tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams, which means you can easily extract/parse tarballs without ever hitting the file system.
+
+	npm install tar-stream
+
+[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
+
+# Usage
+
+tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
+
+## Packing
+
+To create a pack stream use `tar.pack()` and call `pack.entry(header, [buffer], [callback])` to add tar entries.
+
+``` js
+var tar = require('tar-stream');
+var pack = tar.pack(); // pack is a streams2 stream
+
+// add a file called my-test.txt with the content "Hello World!"
+pack.entry({ name: 'my-test.txt' }, 'Hello World!');
+
+// add a file called my-stream-test.txt from a stream
+var entry = pack.entry({ name: 'my-stream-test.txt' }, function(err) {
+	// the stream was added
+	// no more entries
+	pack.finalize();
+});
+myStream.pipe(entry);
+
+// pipe the pack stream somewhere
+pack.pipe(process.stdout);
+```
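+
+If you want the tarball written to disk instead of stdout, you can pipe the pack stream to a file write stream (a minimal sketch; `my-tarball.tar` is just a placeholder path):
+
+``` js
+var fs = require('fs');
+
+// stream the generated tarball to a file
+pack.pipe(fs.createWriteStream('my-tarball.tar'));
+```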
+
+## Extracting
+
+To extract a stream use `tar.extract()` and listen for the `entry` event: `extract.on('entry', function(header, stream, callback) {...})`
+
+``` js
+var extract = tar.extract();
+
+extract.on('entry', function(header, stream, callback) {
+	// header is the tar header
+	// stream is the content body (might be an empty stream)
+	// call callback when you are done with this entry
+
+	stream.resume(); // just auto drain the stream
+	stream.on('end', function() {
+		callback(); // ready for next entry
+	});
+});
+
+extract.on('finish', function() {
+	// all entries read
+});
+
+pack.pipe(extract);
+```
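+
+To parse a tarball that already exists on disk, pipe a file read stream into the extractor (a minimal sketch; `old-tarball.tar` is just a placeholder path):
+
+``` js
+var fs = require('fs');
+
+fs.createReadStream('old-tarball.tar').pipe(extract);
+```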
+
+## Headers
+
+The header object used in `entry` should contain the following properties.
+Most of these values can be found by stat-ing a file, as sketched after the example below.
+
+``` js
+{
+	name: 'path/to/this/entry.txt',
+	size: 1314,        // entry size. defaults to 0
+	mode: 0644,        // entry mode. defaults to 0755 for dirs and 0644 otherwise
+	mtime: new Date(), // last modified date for entry. defaults to now.
+	type: 'file',      // type of entry. defaults to file. can be:
+	                   // file | link | symlink | directory | block-device |
+	                   // character-device | fifo | contiguous-file
+	linkname: 'path',  // linked file name
+	uid: 0,            // uid of entry owner. defaults to 0
+	gid: 0,            // gid of entry owner. defaults to 0
+	uname: 'maf',      // uname of entry owner. defaults to null
+	gname: 'staff',    // gname of entry owner. defaults to null
+	devmajor: 0,       // device major number. defaults to 0
+	devminor: 0        // device minor number. defaults to 0
+}
+```
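+
+As noted above, most of these values can be pulled straight from a stat call (a minimal sketch using `fs.statSync`; `./hello.txt` is just a placeholder path):
+
+``` js
+var fs = require('fs');
+
+// stat the file and copy the relevant fields into the tar header
+var st = fs.statSync('./hello.txt');
+
+pack.entry({
+	name: 'hello.txt',
+	size: st.size,
+	mode: st.mode,
+	mtime: st.mtime,
+	uid: st.uid,
+	gid: st.gid
+}, fs.readFileSync('./hello.txt'));
+```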
+
+## Modifying existing tarballs
+
+Using tar-stream it is easy to rewrite paths / change modes etc. in an existing tarball.
+
+``` js
+var extract = tar.extract();
+var pack = tar.pack();
+var path = require('path');
+
+extract.on('entry', function(header, stream, callback) {
+	// let's prefix all names with 'tmp'
+	header.name = path.join('tmp', header.name);
+	// write the new entry to the pack stream
+	stream.pipe(pack.entry(header, callback));
+});
+
+extract.on('finish', function() {
+	// all entries done - let's finalize it
+	pack.finalize();
+});
+
+// pipe the old tarball to the extractor
+oldTarball.pipe(extract);
+
+// pipe the new tarball to another stream
+pack.pipe(newTarball);
+```
+
+## Performance
+
+[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance)
+
+# License
+
+MIT
diff --git a/extract.js b/extract.js
new file mode 100644
index 0000000..c5954eb
--- /dev/null
+++ b/extract.js
@@ -0,0 +1,194 @@
+var util = require('util');
+var bl = require('bl');
+var xtend = require('xtend');
+var headers = require('./headers');
+
+var Writable = require('readable-stream').Writable;
+var PassThrough = require('readable-stream').PassThrough;
+
+var noop = function() {};
+
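+// how many padding bytes follow an entry of this size (tar entries are padded to 512-byte blocks)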
+var overflow = function(size) {
+	size &= 511;
+	return size && 512 - size;
+};
+
+var emptyStream = function() {
+	var s = new PassThrough();
+	s.end();
+	return s;
+};
+
+var mixinPax = function(header, pax) {
+	if (pax.path) header.name = pax.path;
+	if (pax.linkpath) header.linkname = pax.linkpath;
+	return header;
+};
+
+var Extract = function(opts) {
+	if (!(this instanceof Extract)) return new Extract(opts);
+	Writable.call(this, opts);
+
+	this._buffer = bl();
+	this._missing = 0;
+	this._onparse = noop;
+	this._header = null;
+	this._stream = null;
+	this._overflow = null;
+	this._cb = null;
+	this._locked = false;
+	this._destroyed = false;
+	this._pax = null;
+	this._paxGlobal = null;
+
+	var self = this;
+	var b = self._buffer;
+
+	var oncontinue = function() {
+		self._continue();
+	};
+
+	var onunlock = function(err) {
+		self._locked = false;
+		if (err) return self.destroy(err);
+		if (!self._stream) oncontinue();
+	};
+
+	var onstreamend = function() {
+		self._stream = null;
+		var drain = overflow(self._header.size);
+		if (drain) self._parse(drain, ondrain);
+		else self._parse(512, onheader);
+		if (!self._locked) oncontinue();
+	};
+
+	var ondrain = function() {
+		self._buffer.consume(overflow(self._header.size));
+		self._parse(512, onheader);
+		oncontinue();
+	};
+
+	var onpaxglobalheader = function() {
+		var size = self._header.size;
+		self._paxGlobal = headers.decodePax(b.slice(0, size));
+		b.consume(size);
+		onstreamend();
+	};
+
+	var onpaxheader = function() {
+		var size = self._header.size;
+		self._pax = headers.decodePax(b.slice(0, size));
+		if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax);
+		b.consume(size);
+		onstreamend();
+	};
+
+	var onheader = function() {
+		var header;
+		try {
+			header = self._header = headers.decode(b.slice(0, 512));
+		} catch (err) {
+			self.emit('error', err);
+		}
+		b.consume(512);
+
+		if (!header) {
+			self._parse(512, onheader);
+			oncontinue();
+			return;
+		}
+		if (header.type === 'pax-global-header') {
+			self._parse(header.size, onpaxglobalheader);
+			oncontinue();
+			return;
+		}
+		if (header.type === 'pax-header') {
+			self._parse(header.size, onpaxheader);
+			oncontinue();
+			return;
+		}
+
+		if (self._pax) {
+			self._header = header = mixinPax(header, self._pax);
+			self._pax = null;
+		}
+
+		self._locked = true;
+
+		if (!header.size) {
+			self._parse(512, onheader);
+			self.emit('entry', header, emptyStream(), onunlock);
+			return;
+		}
+
+		self._stream = new PassThrough();
+
+		self.emit('entry', header, self._stream, onunlock);
+		self._parse(header.size, onstreamend);
+		oncontinue();
+	};
+
+	this._parse(512, onheader);
+};
+
+util.inherits(Extract, Writable);
+
+Extract.prototype.destroy = function(err) {
+	if (this._destroyed) return;
+	this._destroyed = true;
+
+	if (err) this.emit('error', err);
+	this.emit('close');
+	if (this._stream) this._stream.emit('close');
+};
+
+Extract.prototype._parse = function(size, onparse) {
+	if (this._destroyed) return;
+	this._missing = size;
+	this._onparse = onparse;
+};
+
+Extract.prototype._continue = function(err) {
+	if (this._destroyed) return;
+	var cb = this._cb;
+	this._cb = noop;
+	if (this._overflow) this._write(this._overflow, undefined, cb);
+	else cb();
+};
+
+Extract.prototype._write = function(data, enc, cb) {
+	if (this._destroyed) return;
+
+	var s = this._stream;
+	var b = this._buffer;
+	var missing = this._missing;
+
+	// we do not reach end-of-chunk now. just forward it
+
+	if (data.length < missing) {
+		this._missing -= data.length;
+		this._overflow = null;
+		if (s) return s.write(data, cb);
+		b.append(data);
+		return cb();
+	}
+
+	// end-of-chunk. the parser should call cb.
+
+	this._cb = cb;
+	this._missing = 0;
+
+	var overflow = null;
+	if (data.length > missing) {
+		overflow = data.slice(missing);
+		data = data.slice(0, missing);
+	}
+
+	if (s) s.end(data);
+	else b.append(data);
+
+	this._overflow = overflow;
+	this._onparse();
+};
+
+module.exports = Extract;
diff --git a/headers.js b/headers.js
new file mode 100644
index 0000000..5ea4333
--- /dev/null
+++ b/headers.js
@@ -0,0 +1,218 @@
+var ZEROS = '0000000000000000000';
+var ZERO_OFFSET = '0'.charCodeAt(0);
+var USTAR = 'ustar\x0000';
+
+var clamp = function(index, len, defaultValue) {
+	if (typeof index !== 'number') return defaultValue;
+	index = ~~index;  // Coerce to integer.
+	if (index >= len) return len;
+	if (index >= 0) return index;
+	index += len;
+	if (index >= 0) return index;
+	return 0;
+};
+
+var toType = function(flag) {
+	switch (flag) {
+		case 0:
+		return 'file';
+		case 1:
+		return 'link';
+		case 2:
+		return 'symlink';
+		case 3:
+		return 'character-device';
+		case 4:
+		return 'block-device';
+		case 5:
+		return 'directory';
+		case 6:
+		return 'fifo';
+		case 7:
+		return 'contiguous-file';
+		case 72:
+		return 'pax-header';
+		case 55:
+		return 'pax-global-header';
+	}
+
+	return null;
+};
+
+var toTypeflag = function(flag) {
+	switch (flag) {
+		case 'file':
+		return 0;
+		case 'link':
+		return 1;
+		case 'symlink':
+		return 2;
+		case 'character-device':
+		return 3;
+		case 'block-device':
+		return 4;
+		case 'directory':
+		return 5;
+		case 'fifo':
+		return 6;
+		case 'contiguous-file':
+		return 7;
+		case 'pax-header':
+		return 72;
+	}
+
+	return 0;
+};
+
+var alloc = function(size) {
+	var buf = new Buffer(size);
+	buf.fill(0);
+	return buf;
+};
+
+var indexOf = function(block, num, offset, end) {
+	for (; offset < end; offset++) {
+		if (block[offset] === num) return offset;
+	}
+	return end;
+};
+
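+// header checksum: sum of all 512 header bytes, with the 8-byte checksum field (offsets 148-155) counted as ASCII spaces (0x20)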
+var cksum = function(block) {
+	var sum = 8 * 32;
+	for (var i = 0; i < 148; i++)   sum += block[i];
+	for (var i = 156; i < 512; i++) sum += block[i];
+	return sum;
+};
+
+var encodeOct = function(val, n) {
+	val = val.toString(8);
+	return ZEROS.slice(0, n-val.length)+val+' ';
+};
+
+var decodeOct = function(val, offset) {
+	return parseInt(val.slice(offset, clamp(indexOf(val, 32, offset, val.length), val.length, val.length)).toString(), 8);
+};
+
+var decodeStr = function(val, offset, length) {
+	return val.slice(offset, indexOf(val, 0, offset, offset+length)).toString();
+};
+
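+// a pax record is "<len> <key>=<value>\n" where <len> is the total record length, including its own digits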
+var addLength = function(str) {
+	var len = Buffer.byteLength(str);
+	var digits = Math.floor(Math.log(len) / Math.log(10)) + 1;
+	if (len + digits > Math.pow(10, digits)) digits++;
+
+	return (len+digits)+str;
+};
+
+exports.encodePax = function(opts) { // TODO: encode more stuff in pax
+	var result = '';
+	if (opts.name) result += addLength(' path='+opts.name+'\n');
+	if (opts.linkname) result += addLength(' linkpath='+opts.linkname+'\n');
+	return new Buffer(result);
+};
+
+exports.decodePax = function(buf) {
+	var result = {};
+
+	while (buf.length) {
+		var i = 0;
+		for (; i < buf.length && buf[i] !== 32; i++);
+		var len = parseInt(buf.slice(0, i).toString());
+		if (!len) return result;
+
+		var b = buf.slice(i+1, len-1).toString();
+		var keyIndex = b.indexOf('=');
+		if (keyIndex === -1) return result;
+		result[b.slice(0, keyIndex)] = b.slice(keyIndex+1);
+
+		buf = buf.slice(len);
+	}
+
+	return result;
+};
+
+exports.encode = function(opts) {
+	var buf = alloc(512);
+	var name = opts.name;
+	var prefix = '';
+
+	if (opts.typeflag === 5 && name[name.length-1] !== '/') name += '/';
+	if (Buffer.byteLength(name) !== name.length) return null; // utf-8
+
+	while (Buffer.byteLength(name) > 100) {
+		var i = name.indexOf('/');
+		if (i === -1) return null;
+		prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i);
+		name = name.slice(i+1);
+	}
+
+	if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null;
+	if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null;
+
+	buf.write(name);
+	buf.write(encodeOct(opts.mode & 07777, 6), 100);
+	buf.write(encodeOct(opts.uid, 6), 108);
+	buf.write(encodeOct(opts.gid, 6), 116);
+	buf.write(encodeOct(opts.size, 11), 124);
+	buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136);
+
+	buf[156] = ZERO_OFFSET + toTypeflag(opts.type);
+
+	if (opts.linkname) buf.write(opts.linkname, 157);
+
+	buf.write(USTAR, 257);
+	if (opts.uname) buf.write(opts.uname, 265);
+	if (opts.gname) buf.write(opts.gname, 297);
+	buf.write(encodeOct(opts.devmajor || 0, 6), 329);
+	buf.write(encodeOct(opts.devminor || 0, 6), 337);
+
+	if (prefix) buf.write(prefix, 345);
+
+	buf.write(encodeOct(cksum(buf), 6), 148);
+
+	return buf;
+};
+
+exports.decode = function(buf) {
+	var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET;
+	var type = toType(typeflag);
+
+	var name = decodeStr(buf, 0, 100);
+	var mode = decodeOct(buf, 100);
+	var uid = decodeOct(buf, 108);
+	var gid = decodeOct(buf, 116);
+	var size = decodeOct(buf, 124);
+	var mtime = decodeOct(buf, 136);
+	var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100);
+	var uname = decodeStr(buf, 265, 32);
+	var gname = decodeStr(buf, 297, 32);
+	var devmajor = decodeOct(buf, 329);
+	var devminor = decodeOct(buf, 337);
+
+	if (buf[345]) name = decodeStr(buf, 345, 155)+'/'+name;
+
+	var c = cksum(buf);
+
+	// the checksum is still the initial value if the header block was all zeros (end-of-archive marker)
+	if (c === 8*32) return null;
+
+	// otherwise the stored checksum has to match the computed one
+	if (c !== decodeOct(buf, 148)) throw new Error('invalid header');
+
+	return {
+		name: name,
+		mode: mode,
+		uid: uid,
+		gid: gid,
+		size: size,
+		mtime: new Date(1000 * mtime),
+		type: toType(typeflag),
+		linkname: linkname,
+		uname: uname,
+		gname: gname,
+		devmajor: devmajor,
+		devminor: devminor
+	};
+};
+
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..dbd60ed
--- /dev/null
+++ b/index.js
@@ -0,0 +1,2 @@
+exports.extract = require('./extract');
+exports.pack = require('./pack');
\ No newline at end of file
diff --git a/pack.js b/pack.js
new file mode 100644
index 0000000..df5f407
--- /dev/null
+++ b/pack.js
@@ -0,0 +1,194 @@
+var util = require('util');
+var eos = require('end-of-stream');
+var headers = require('./headers');
+
+var Readable = require('readable-stream').Readable;
+var Writable = require('readable-stream').Writable;
+var PassThrough = require('readable-stream').PassThrough;
+
+var END_OF_TAR = new Buffer(1024);
+END_OF_TAR.fill(0);
+
+var noop = function() {};
+
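+// push zero bytes so the entry ends on a 512-byte tar block boundary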
+var overflow = function(self, size) {
+	size &= 511;
+	if (size) self.push(END_OF_TAR.slice(0, 512 - size));
+};
+
+var Sink = function(to) {
+	Writable.call(this);
+	this.written = 0;
+	this._to = to;
+	this._destroyed = false;
+};
+
+util.inherits(Sink, Writable);
+
+Sink.prototype._write = function(data, enc, cb) {
+	this.written += data.length;
+	if (this._to.push(data)) return cb();
+	this._to._drain = cb;
+};
+
+Sink.prototype.destroy = function() {
+	if (this._destroyed) return;
+	this._destroyed = true;
+	this.emit('close');
+};
+
+var Void = function() {
+	Writable.call(this);
+	this._destroyed = false;
+};
+
+util.inherits(Void, Writable);
+
+Void.prototype._write = function(data, enc, cb) {
+	cb(new Error('No body allowed for this entry'));
+};
+
+Void.prototype.destroy = function() {
+	if (this._destroyed) return;
+	this._destroyed = true;
+	this.emit('close');
+};
+
+var Pack = function(opts) {
+	if (!(this instanceof Pack)) return new Pack(opts);
+	Readable.call(this, opts);
+
+	this._drain = noop;
+	this._finalized = false;
+	this._finalizing = false;
+	this._destroyed = false;
+	this._stream = null;
+};
+
+util.inherits(Pack, Readable);
+
+Pack.prototype.entry = function(header, buffer, callback) {
+	if (this._stream) throw new Error('already piping an entry');
+	if (this._finalized || this._destroyed) return;
+
+	if (typeof buffer === 'function') {
+		callback = buffer;
+		buffer = null;
+	}
+
+	if (!callback) callback = noop;
+
+	var self = this;
+
+	if (!header.size)  header.size = 0;
+	if (!header.type)  header.type = 'file';
+	if (!header.mode)  header.mode = header.type === 'directory' ? 0755 : 0644;
+	if (!header.uid)   header.uid = 0;
+	if (!header.gid)   header.gid = 0;
+	if (!header.mtime) header.mtime = new Date();
+
+	if (typeof buffer === 'string') buffer = new Buffer(buffer);
+	if (Buffer.isBuffer(buffer)) {
+		header.size = buffer.length;
+		this._encode(header);
+		this.push(buffer);
+		overflow(self, header.size);
+		process.nextTick(callback);
+		return new Void();
+	}
+	if (header.type !== 'file' && header.type !== 'contiguous-file') {
+		this._encode(header);
+		process.nextTick(callback);
+		return new Void();
+	}
+
+	var sink = new Sink(this);
+
+	this._encode(header);
+	this._stream = sink;
+
+	eos(sink, function(err) {
+		self._stream = null;
+
+		if (err) { // stream was closed
+			self.destroy();
+			return callback(err);
+		}
+
+		if (sink.written !== header.size) { // corrupting tar
+			self.destroy();
+			return callback(new Error('size mismatch'));
+		}
+
+		overflow(self, header.size);
+		if (self._finalizing) self.finalize();
+		callback();
+	});
+
+	return sink;
+};
+
+Pack.prototype.finalize = function() {
+	if (this._stream) {
+		this._finalizing = true;
+		return;
+	}
+
+	if (this._finalized) return;
+	this._finalized = true;
+	this.push(END_OF_TAR);
+	this.push(null);
+};
+
+Pack.prototype.destroy = function(err) {
+	if (this._destroyed) return;
+	this._destroyed = true;
+
+	if (err) this.emit('error', err);
+	this.emit('close');
+	if (this._stream && this._stream.destroy) this._stream.destroy();
+};
+
+Pack.prototype._encode = function(header) {
+	var buf = headers.encode(header);
+	if (buf) this.push(buf);
+	else this._encodePax(header);
+};
+
+Pack.prototype._encodePax = function(header) {
+	var paxHeader = headers.encodePax({
+		name: header.name,
+		linkname: header.linkname
+	});
+
+	var newHeader = {
+		name: 'PaxHeader',
+		mode: header.mode,
+		uid: header.uid,
+		gid: header.gid,
+		size: paxHeader.length,
+		mtime: header.mtime,
+		type: 'pax-header',
+		linkname: header.linkname && 'PaxHeader',
+		uname: header.uname,
+		gname: header.gname,
+		devmajor: header.devmajor,
+		devminor: header.devminor
+	};
+
+	this.push(headers.encode(newHeader));
+	this.push(paxHeader);
+	overflow(this, paxHeader.length);
+
+	newHeader.size = header.size;
+	newHeader.type = header.type;
+	this.push(headers.encode(newHeader));
+};
+
+Pack.prototype._read = function(n) {
+	var drain = this._drain;
+	this._drain = noop;
+	drain();
+};
+
+module.exports = Pack;
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..c7c5a2c
--- /dev/null
+++ b/package.json
@@ -0,0 +1,39 @@
+{
+  "name": "tar-stream",
+  "version": "0.4.3",
+  "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
+  "repository": "git://github.com/mafintosh/tar-stream.git",
+  "author": "Mathias Buus <mathiasbuus at gmail.com>",
+  "engines": {
+    "node": ">= 0.8.0"
+  },
+  "dependencies": {
+    "bl": "~0.8.1",
+    "end-of-stream": "~0.1.3",
+    "readable-stream": "~1.0.26-4",
+    "xtend": "~3.0.0"
+  },
+  "devDependencies": {
+    "tap": "~0.4.6",
+    "concat-stream": "~1.2.1"
+  },
+  "scripts": {
+    "test": "tap test/*.js"
+  },
+  "keywords": [
+    "tar",
+    "tarball",
+    "parse",
+    "parser",
+    "generate",
+    "generator",
+    "stream",
+    "stream2",
+    "streams",
+    "streams2",
+    "streaming",
+    "pack",
+    "extract",
+    "modify"
+  ]
+}
diff --git a/test/extract.js b/test/extract.js
new file mode 100644
index 0000000..c1e4fd3
--- /dev/null
+++ b/test/extract.js
@@ -0,0 +1,420 @@
+var test = require('tap').test;
+var tar = require('../index');
+var fixtures = require('./fixtures');
+var concat = require('concat-stream');
+var fs = require('fs');
+
+var clamp = function(index, len, defaultValue) {
+	if (typeof index !== 'number') return defaultValue;
+	index = ~~index;  // Coerce to integer.
+	if (index >= len) return len;
+	if (index >= 0) return index;
+	index += len;
+	if (index >= 0) return index;
+	return 0;
+};
+
+test('one-file', function(t) {
+	t.plan(3);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	extract.on('entry', function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'test.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 12,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'hello world\n');
+			callback();
+		}));
+	});
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR));
+});
+
+test('chunked-one-file', function(t) {
+	t.plan(3);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	extract.on('entry', function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'test.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 12,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'hello world\n');
+			callback();
+		}));
+	});
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	var b = fs.readFileSync(fixtures.ONE_FILE_TAR);
+
+	for (var i = 0; i < b.length; i += 321) {
+		extract.write(b.slice(i, clamp(i+321, b.length, b.length)));
+	}
+	extract.end();
+});
+
+
+test('multi-file', function(t) {
+	t.plan(5);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	var onfile1 = function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'file-1.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 12,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		extract.on('entry', onfile2);
+		stream.pipe(concat(function(data) {
+			t.same(data.toString(), 'i am file-1\n');
+			callback();
+		}));
+	};
+
+	var onfile2 = function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'file-2.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 12,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'i am file-2\n');
+			callback();
+		}));
+	};
+
+	extract.once('entry', onfile1);
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR));
+});
+
+test('chunked-multi-file', function(t) {
+	t.plan(5);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	var onfile1 = function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'file-1.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 12,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		extract.on('entry', onfile2);
+		stream.pipe(concat(function(data) {
+			t.same(data.toString(), 'i am file-1\n');
+			callback();
+		}));
+	};
+
+	var onfile2 = function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'file-2.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 12,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'i am file-2\n');
+			callback();
+		}));
+	};
+
+	extract.once('entry', onfile1);
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	var b = fs.readFileSync(fixtures.MULTI_FILE_TAR);
+	for (var i = 0; i < b.length; i += 321) {
+		extract.write(b.slice(i, clamp(i+321, b.length, b.length)));
+	}
+	extract.end();
+});
+
+test('types', function(t) {
+	t.plan(3);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	var ondir = function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'directory',
+			mode: 0755,
+			uid: 501,
+			gid: 20,
+			size: 0,
+			mtime: new Date(1387580181000),
+			type: 'directory',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+		stream.on('data', function() {
+			t.ok(false);
+		});
+		extract.once('entry', onlink);
+		callback();
+	};
+
+	var onlink = function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'directory-link',
+			mode: 0755,
+			uid: 501,
+			gid: 20,
+			size: 0,
+			mtime: new Date(1387580181000),
+			type: 'symlink',
+			linkname: 'directory',
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+		stream.on('data', function() {
+			t.ok(false);
+		});
+		noEntries = true;
+		callback();
+	};
+
+	extract.once('entry', ondir);
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	extract.end(fs.readFileSync(fixtures.TYPES_TAR));
+});
+
+test('long-name', function(t) {
+	t.plan(3);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	extract.on('entry', function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 16,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'hello long name\n');
+			callback();
+		}));
+	});
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR));
+});
+
+test('unicode-bsd', function(t) { // can unpack a bsdtar unicode tarball
+	t.plan(3);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	extract.on('entry', function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'høllø.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 4,
+			mtime: new Date(1387588646000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'hej\n');
+			callback();
+		}));
+	});
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR));
+});
+
+test('unicode', function(t) { // can unpack a unicode tarball
+	t.plan(3);
+
+	var extract = tar.extract();
+	var noEntries = false;
+
+	extract.on('entry', function(header, stream, callback) {
+		t.deepEqual(header, {
+			name: 'høstål.txt',
+			mode: 0644,
+			uid: 501,
+			gid: 20,
+			size: 8,
+			mtime: new Date(1387580181000),
+			type: 'file',
+			linkname: null,
+			uname: 'maf',
+			gname: 'staff',
+			devmajor: 0,
+			devminor: 0
+		});
+
+		stream.pipe(concat(function(data) {
+			noEntries = true;
+			t.same(data.toString(), 'høllø\n');
+			callback();
+		}));
+	});
+
+	extract.on('finish', function() {
+		t.ok(noEntries);
+	});
+
+	extract.end(fs.readFileSync(fixtures.UNICODE_TAR));
+});
+
+test('name-is-100', function(t) {
+	t.plan(3);
+
+	var extract = tar.extract();
+
+	extract.on('entry', function(header, stream, callback) {
+		t.same(header.name.length, 100);
+
+		stream.pipe(concat(function(data) {
+			t.same(data.toString(), 'hello\n');
+			callback();
+		}));
+	});
+
+	extract.on('finish', function() {
+		t.ok(true);
+	});
+
+	extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR));
+});
+
+test('invalid-file', function(t) {
+	t.plan(1);
+
+	var extract = tar.extract();
+
+	extract.on('error', function(err) {
+		t.ok(!!err);
+		extract.destroy();
+	});
+
+	extract.end(fs.readFileSync(fixtures.INVALID_TGZ));
+});
\ No newline at end of file
diff --git a/test/fixtures/index.js b/test/fixtures/index.js
new file mode 100644
index 0000000..59bc87b
--- /dev/null
+++ b/test/fixtures/index.js
@@ -0,0 +1,10 @@
+var path = require('path');
+
+exports.ONE_FILE_TAR = path.join(__dirname, 'one-file.tar');
+exports.MULTI_FILE_TAR = path.join(__dirname, 'multi-file.tar');
+exports.TYPES_TAR = path.join(__dirname, 'types.tar');
+exports.LONG_NAME_TAR = path.join(__dirname, 'long-name.tar');
+exports.UNICODE_BSD_TAR = path.join(__dirname, 'unicode-bsd.tar');
+exports.UNICODE_TAR = path.join(__dirname, 'unicode.tar');
+exports.NAME_IS_100_TAR = path.join(__dirname, 'name-is-100.tar');
+exports.INVALID_TGZ = path.join(__dirname, 'invalid.tgz');
\ No newline at end of file
diff --git a/test/fixtures/invalid.tgz b/test/fixtures/invalid.tgz
new file mode 100644
index 0000000..ea35ec4
Binary files /dev/null and b/test/fixtures/invalid.tgz differ
diff --git a/test/fixtures/long-name.tar b/test/fixtures/long-name.tar
new file mode 100644
index 0000000..cf93981
Binary files /dev/null and b/test/fixtures/long-name.tar differ
diff --git a/test/fixtures/multi-file.tar b/test/fixtures/multi-file.tar
new file mode 100644
index 0000000..6dabdf6
Binary files /dev/null and b/test/fixtures/multi-file.tar differ
diff --git a/test/fixtures/name-is-100.tar b/test/fixtures/name-is-100.tar
new file mode 100644
index 0000000..299b2e8
Binary files /dev/null and b/test/fixtures/name-is-100.tar differ
diff --git a/test/fixtures/one-file.tar b/test/fixtures/one-file.tar
new file mode 100644
index 0000000..8d4ac28
Binary files /dev/null and b/test/fixtures/one-file.tar differ
diff --git a/test/fixtures/types.tar b/test/fixtures/types.tar
new file mode 100644
index 0000000..197af7b
Binary files /dev/null and b/test/fixtures/types.tar differ
diff --git a/test/fixtures/unicode-bsd.tar b/test/fixtures/unicode-bsd.tar
new file mode 100644
index 0000000..2f74b5f
Binary files /dev/null and b/test/fixtures/unicode-bsd.tar differ
diff --git a/test/fixtures/unicode.tar b/test/fixtures/unicode.tar
new file mode 100644
index 0000000..ab7dbad
Binary files /dev/null and b/test/fixtures/unicode.tar differ
diff --git a/test/pack.js b/test/pack.js
new file mode 100644
index 0000000..69a7680
--- /dev/null
+++ b/test/pack.js
@@ -0,0 +1,144 @@
+var test = require('tap').test;
+var tar = require('../index');
+var fixtures = require('./fixtures');
+var concat = require('concat-stream');
+var fs = require('fs');
+
+test('one-file', function(t) {
+	t.plan(2);
+
+	var pack = tar.pack();
+
+	pack.entry({
+		name:'test.txt',
+		mtime:new Date(1387580181000),
+		mode:0644,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	}, 'hello world\n');
+
+	pack.finalize();
+
+	pack.pipe(concat(function(data) {
+		t.same(data.length & 511, 0);
+		t.deepEqual(data, fs.readFileSync(fixtures.ONE_FILE_TAR));
+	}));
+});
+
+test('multi-file', function(t) {
+	t.plan(2);
+
+	var pack = tar.pack();
+
+	pack.entry({
+		name:'file-1.txt',
+		mtime:new Date(1387580181000),
+		mode:0644,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	}, 'i am file-1\n');
+
+	pack.entry({
+		name:'file-2.txt',
+		mtime:new Date(1387580181000),
+		mode:0644,
+		size:12,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	}).end('i am file-2\n');
+
+	pack.finalize();
+
+	pack.pipe(concat(function(data) {
+		t.same(data.length & 511, 0);
+		t.deepEqual(data, fs.readFileSync(fixtures.MULTI_FILE_TAR));
+	}));
+});
+
+test('types', function(t) {
+	t.plan(2);
+	var pack = tar.pack();
+
+	pack.entry({
+		name:'directory',
+		mtime:new Date(1387580181000),
+		type:'directory',
+		mode:0755,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	});
+
+	pack.entry({
+		name:'directory-link',
+		mtime:new Date(1387580181000),
+		type:'symlink',
+		linkname: 'directory',
+		mode:0755,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	});
+
+	pack.finalize();
+
+	pack.pipe(concat(function(data) {
+		t.equal(data.length & 511, 0);
+		t.deepEqual(data, fs.readFileSync(fixtures.TYPES_TAR));
+	}));
+
+});
+
+test('long-name', function(t) {
+	t.plan(2);
+	var pack = tar.pack();
+
+	pack.entry({
+		name:'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
+		mtime:new Date(1387580181000),
+		type:'file',
+		mode:0644,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	}, 'hello long name\n');
+
+	pack.finalize();
+
+	pack.pipe(concat(function(data) {
+		t.equal(data.length & 511, 0);
+		t.deepEqual(data, fs.readFileSync(fixtures.LONG_NAME_TAR));
+	}));
+});
+
+test('unicode', function(t) {
+	t.plan(2);
+	var pack = tar.pack();
+
+	pack.entry({
+		name:'høstål.txt',
+		mtime:new Date(1387580181000),
+		type:'file',
+		mode:0644,
+		uname:'maf',
+		gname:'staff',
+		uid:501,
+		gid:20
+	}, 'høllø\n');
+
+	pack.finalize();
+
+	pack.pipe(concat(function(data) {
+		t.equal(data.length & 511, 0);
+		t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR));
+	}));
+});
\ No newline at end of file

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-tar-stream.git


