[Pkg-javascript-commits] [node-tar-stream] 01/06: Imported Upstream version 1.5.2
Paolo Greppi
paolog-guest at moszumanska.debian.org
Tue Nov 29 13:06:16 UTC 2016
This is an automated email from the git hooks/post-receive script.
paolog-guest pushed a commit to branch master
in repository node-tar-stream.
commit e9d0ee31b37d32e15679e19c35527c59e1bc1d02
Author: Paolo Greppi <paolo.greppi at libpf.com>
Date: Tue Nov 29 13:45:44 2016 +0100
Imported Upstream version 1.5.2
---
.travis.yml | 6 +-
LICENSE | 21 +
README.md | 120 ++---
extract.js | 440 ++++++++++--------
headers.js | 499 +++++++++++---------
index.js | 4 +-
pack.js | 448 ++++++++++--------
package.json | 37 +-
test/extract.js | 925 ++++++++++++++++++++-----------------
test/fixtures/base-256-uid-gid.tar | Bin 0 -> 10240 bytes
test/fixtures/gnu-long-path.tar | Bin 0 -> 7594 bytes
test/fixtures/index.js | 22 +-
test/fixtures/pax.tar | Bin 0 -> 3072 bytes
test/fixtures/space.tar | Bin 0 -> 10240 bytes
test/pack.js | 313 +++++++------
15 files changed, 1589 insertions(+), 1246 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 9672e12..77f07b1 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,6 @@
language: node_js
node_js:
- - "0.8"
- "0.10"
-before_install:
- - npm install -g npm@~1.4.6
+ - '0.12'
+ - '4'
+ - '5'
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..757562e
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Mathias Buus
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index 7ae980a..8d0a094 100644
--- a/README.md
+++ b/README.md
@@ -2,35 +2,50 @@
tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.
- npm install tar-stream
+Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
+
+```
+npm install tar-stream
+```
[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
-# Usage
+## Usage
tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
+
+It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc)
+
+## Related
+
+If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module.
+
## Packing
To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries.
``` js
-var tar = require('tar-stream');
-var pack = tar.pack(); // p is a streams2 stream
+var tar = require('tar-stream')
+var pack = tar.pack() // pack is a streams2 stream
// add a file called my-test.txt with the content "Hello World!"
-pack.entry({ name: 'my-test.txt' }, 'Hello World!');
+pack.entry({ name: 'my-test.txt' }, 'Hello World!')
// add a file called my-stream-test.txt from a stream
-var entry = pack.entry({ name: 'my-stream-test.txt' }, function(err) {
- // the stream was added
- // no more entries
- pack.finalize();
-});
-myStream.pipe(entry);
+var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
+ // the stream was added
+ // no more entries
+ pack.finalize()
+})
+
+entry.write('hello')
+entry.write(' ')
+entry.write('world')
+entry.end()
// pipe the pack stream somewhere
-pack.pipe(process.stdout);
+pack.pipe(process.stdout)
```
## Extracting
@@ -38,47 +53,48 @@ pack.pipe(process.stdout);
To extract a stream use `tar.extract()` and listen for `extract.on('entry', header, stream, callback)`
``` js
-var extract = tar.extract();
+var extract = tar.extract()
extract.on('entry', function(header, stream, callback) {
- // header is the tar header
- // stream is the content body (might be an empty stream)
- // call next when you are done with this entry
+ // header is the tar header
+ // stream is the content body (might be an empty stream)
+ // call next when you are done with this entry
+
+ stream.on('end', function() {
+ callback() // ready for next entry
+ })
- stream.resume(); // just auto drain the stream
- stream.on('end', function() {
- callback(); // ready for next entry
- });
-});
+ stream.resume() // just auto drain the stream
+})
extract.on('finish', function() {
- // all entries read
-});
+ // all entries read
+})
-pack.pipe(extract);
+pack.pipe(extract)
```
## Headers
The header object using in `entry` should contain the following properties.
-Most of these values can be found by stating a file.
+Most of these values can be found by stat'ing a file.
``` js
{
- name: 'path/to/this/entry.txt',
- size: 1314, // entry size. defaults to 0
- mode: 0644, // entry mode. defaults to to 0755 for dirs and 0644 otherwise
- mtime: new Date(), // last modified date for entry. defaults to now.
- type: 'file', // type of entry. defaults to file. can be:
- // file | link | symlink | directory | block-device
- // character-device | fifo | contigious-file
- linkname: 'path', // linked file name
- uid: 0, // uid of entry owner. defaults to 0
- gid: 0, // gid of entry owner. defaults to 0
- uname: 'maf', // uname of entry owner. defaults to null
- gname: 'staff', // gname of entry owner. defaults to null
- devmajor: 0, // device major version. defaults to 0
- devminor: 0 // device minor version. defaults to 0
+ name: 'path/to/this/entry.txt',
+ size: 1314, // entry size. defaults to 0
+ mode: 0644, // entry mode. defaults to 0755 for dirs and 0644 otherwise
+ mtime: new Date(), // last modified date for entry. defaults to now.
+ type: 'file', // type of entry. defaults to file. can be:
+ // file | link | symlink | directory | block-device
+ // character-device | fifo | contiguous-file
+ linkname: 'path', // linked file name
+ uid: 0, // uid of entry owner. defaults to 0
+ gid: 0, // gid of entry owner. defaults to 0
+ uname: 'maf', // uname of entry owner. defaults to null
+ gname: 'staff', // gname of entry owner. defaults to null
+ devmajor: 0, // device major version. defaults to 0
+ devminor: 0 // device minor version. defaults to 0
}
```
@@ -87,27 +103,27 @@ Most of these values can be found by stating a file.
Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball.
``` js
-var extract = tar.extract();
-var pack = tar.pack();
-var path = require('path');
+var extract = tar.extract()
+var pack = tar.pack()
+var path = require('path')
extract.on('entry', function(header, stream, callback) {
- // let's prefix all names with 'tmp'
- header.name = path.join('tmp', header.name);
- // write the new entry to the pack stream
- stream.pipe(pack.entry(header, callback));
-});
+ // let's prefix all names with 'tmp'
+ header.name = path.join('tmp', header.name)
+ // write the new entry to the pack stream
+ stream.pipe(pack.entry(header, callback))
+})
extract.on('finish', function() {
- // all entries done - lets finalize it
- pack.finalize();
-});
+ // all entries done - lets finalize it
+ pack.finalize()
+})
// pipe the old tarball to the extractor
-oldTarball.pipe(extract);
+oldTarballStream.pipe(extract)
// pipe the new tarball the another stream
-pack.pipe(newTarball);
+pack.pipe(newTarballStream)
```
## Performance
diff --git a/extract.js b/extract.js
index c5954eb..6b3b9bf 100644
--- a/extract.js
+++ b/extract.js
@@ -1,194 +1,246 @@
-var util = require('util');
-var bl = require('bl');
-var xtend = require('xtend');
-var headers = require('./headers');
-
-var Writable = require('readable-stream').Writable;
-var PassThrough = require('readable-stream').PassThrough;
-
-var noop = function() {};
-
-var overflow = function(size) {
- size &= 511;
- return size && 512 - size;
-};
-
-var emptyStream = function() {
- var s = new PassThrough();
- s.end();
- return s;
-};
-
-var mixinPax = function(header, pax) {
- if (pax.path) header.name = pax.path;
- if (pax.linkpath) header.linkname = pax.linkpath;
- return header;
-};
-
-var Extract = function(opts) {
- if (!(this instanceof Extract)) return new Extract(opts);
- Writable.call(this, opts);
-
- this._buffer = bl();
- this._missing = 0;
- this._onparse = noop;
- this._header = null;
- this._stream = null;
- this._overflow = null;
- this._cb = null;
- this._locked = false;
- this._destroyed = false;
- this._pax = null;
- this._paxGlobal = null;
-
- var self = this;
- var b = self._buffer;
-
- var oncontinue = function() {
- self._continue();
- };
-
- var onunlock = function(err) {
- self._locked = false;
- if (err) return self.destroy(err);
- if (!self._stream) oncontinue();
- };
-
- var onstreamend = function() {
- self._stream = null;
- var drain = overflow(self._header.size);
- if (drain) self._parse(drain, ondrain);
- else self._parse(512, onheader);
- if (!self._locked) oncontinue();
- };
-
- var ondrain = function() {
- self._buffer.consume(overflow(self._header.size));
- self._parse(512, onheader);
- oncontinue();
- };
-
- var onpaxglobalheader = function() {
- var size = self._header.size;
- self._paxGlobal = headers.decodePax(b.slice(0, size));
- b.consume(size);
- onstreamend();
- }
-
- var onpaxheader = function() {
- var size = self._header.size;
- self._pax = headers.decodePax(b.slice(0, size));
- if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax);
- b.consume(size);
- onstreamend();
- };
-
- var onheader = function() {
- var header
- try {
- header = self._header = headers.decode(b.slice(0, 512));
- } catch (err) {
- self.emit('error', err);
- }
- b.consume(512);
-
- if (!header) {
- self._parse(512, onheader);
- oncontinue();
- return;
- }
- if (header.type === 'pax-global-header') {
- self._parse(header.size, onpaxglobalheader);
- oncontinue();
- return;
- }
- if (header.type === 'pax-header') {
- self._parse(header.size, onpaxheader);
- oncontinue();
- return;
- }
-
- if (self._pax) {
- self._header = header = mixinPax(header, self._pax);
- self._pax = null;
- }
-
- self._locked = true;
-
- if (!header.size) {
- self._parse(512, onheader);
- self.emit('entry', header, emptyStream(), onunlock);
- return;
- }
-
- self._stream = new PassThrough();
-
- self.emit('entry', header, self._stream, onunlock);
- self._parse(header.size, onstreamend);
- oncontinue();
- };
-
- this._parse(512, onheader);
-};
-
-util.inherits(Extract, Writable);
-
-Extract.prototype.destroy = function(err) {
- if (this._destroyed) return;
- this._destroyed = true;
-
- if (err) this.emit('error', err);
- this.emit('close');
- if (this._stream) this._stream.emit('close');
-};
-
-Extract.prototype._parse = function(size, onparse) {
- if (this._destroyed) return;
- this._missing = size;
- this._onparse = onparse;
-};
-
-Extract.prototype._continue = function(err) {
- if (this._destroyed) return;
- var cb = this._cb;
- this._cb = noop;
- if (this._overflow) this._write(this._overflow, undefined, cb);
- else cb();
-};
-
-Extract.prototype._write = function(data, enc, cb) {
- if (this._destroyed) return;
-
- var s = this._stream;
- var b = this._buffer;
- var missing = this._missing;
-
- // we do not reach end-of-chunk now. just forward it
-
- if (data.length < missing) {
- this._missing -= data.length;
- this._overflow = null;
- if (s) return s.write(data, cb);
- b.append(data);
- return cb();
- }
-
- // end-of-chunk. the parser should call cb.
-
- this._cb = cb;
- this._missing = 0;
-
- var overflow = null;
- if (data.length > missing) {
- overflow = data.slice(missing);
- data = data.slice(0, missing);
- }
-
- if (s) s.end(data);
- else b.append(data);
-
- this._overflow = overflow;
- this._onparse();
-};
-
-module.exports = Extract;
+var util = require('util')
+var bl = require('bl')
+var xtend = require('xtend')
+var headers = require('./headers')
+
+var Writable = require('readable-stream').Writable
+var PassThrough = require('readable-stream').PassThrough
+
+var noop = function () {}
+
+var overflow = function (size) {
+ size &= 511
+ return size && 512 - size
+}
+
+var emptyStream = function (self, offset) {
+ var s = new Source(self, offset)
+ s.end()
+ return s
+}
+
+var mixinPax = function (header, pax) {
+ if (pax.path) header.name = pax.path
+ if (pax.linkpath) header.linkname = pax.linkpath
+ header.pax = pax
+ return header
+}
+
+var Source = function (self, offset) {
+ this._parent = self
+ this.offset = offset
+ PassThrough.call(this)
+}
+
+util.inherits(Source, PassThrough)
+
+Source.prototype.destroy = function (err) {
+ this._parent.destroy(err)
+}
+
+var Extract = function (opts) {
+ if (!(this instanceof Extract)) return new Extract(opts)
+ Writable.call(this, opts)
+
+ this._offset = 0
+ this._buffer = bl()
+ this._missing = 0
+ this._onparse = noop
+ this._header = null
+ this._stream = null
+ this._overflow = null
+ this._cb = null
+ this._locked = false
+ this._destroyed = false
+ this._pax = null
+ this._paxGlobal = null
+ this._gnuLongPath = null
+ this._gnuLongLinkPath = null
+
+ var self = this
+ var b = self._buffer
+
+ var oncontinue = function () {
+ self._continue()
+ }
+
+ var onunlock = function (err) {
+ self._locked = false
+ if (err) return self.destroy(err)
+ if (!self._stream) oncontinue()
+ }
+
+ var onstreamend = function () {
+ self._stream = null
+ var drain = overflow(self._header.size)
+ if (drain) self._parse(drain, ondrain)
+ else self._parse(512, onheader)
+ if (!self._locked) oncontinue()
+ }
+
+ var ondrain = function () {
+ self._buffer.consume(overflow(self._header.size))
+ self._parse(512, onheader)
+ oncontinue()
+ }
+
+ var onpaxglobalheader = function () {
+ var size = self._header.size
+ self._paxGlobal = headers.decodePax(b.slice(0, size))
+ b.consume(size)
+ onstreamend()
+ }
+
+ var onpaxheader = function () {
+ var size = self._header.size
+ self._pax = headers.decodePax(b.slice(0, size))
+ if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax)
+ b.consume(size)
+ onstreamend()
+ }
+
+ var ongnulongpath = function () {
+ var size = self._header.size
+ this._gnuLongPath = headers.decodeLongPath(b.slice(0, size))
+ b.consume(size)
+ onstreamend()
+ }
+
+ var ongnulonglinkpath = function () {
+ var size = self._header.size
+ this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size))
+ b.consume(size)
+ onstreamend()
+ }
+
+ var onheader = function () {
+ var offset = self._offset
+ var header
+ try {
+ header = self._header = headers.decode(b.slice(0, 512))
+ } catch (err) {
+ self.emit('error', err)
+ }
+ b.consume(512)
+
+ if (!header) {
+ self._parse(512, onheader)
+ oncontinue()
+ return
+ }
+ if (header.type === 'gnu-long-path') {
+ self._parse(header.size, ongnulongpath)
+ oncontinue()
+ return
+ }
+ if (header.type === 'gnu-long-link-path') {
+ self._parse(header.size, ongnulonglinkpath)
+ oncontinue()
+ return
+ }
+ if (header.type === 'pax-global-header') {
+ self._parse(header.size, onpaxglobalheader)
+ oncontinue()
+ return
+ }
+ if (header.type === 'pax-header') {
+ self._parse(header.size, onpaxheader)
+ oncontinue()
+ return
+ }
+
+ if (self._gnuLongPath) {
+ header.name = self._gnuLongPath
+ self._gnuLongPath = null
+ }
+
+ if (self._gnuLongLinkPath) {
+ header.linkname = self._gnuLongLinkPath
+ self._gnuLongLinkPath = null
+ }
+
+ if (self._pax) {
+ self._header = header = mixinPax(header, self._pax)
+ self._pax = null
+ }
+
+ self._locked = true
+
+ if (!header.size) {
+ self._parse(512, onheader)
+ self.emit('entry', header, emptyStream(self, offset), onunlock)
+ return
+ }
+
+ self._stream = new Source(self, offset)
+
+ self.emit('entry', header, self._stream, onunlock)
+ self._parse(header.size, onstreamend)
+ oncontinue()
+ }
+
+ this._parse(512, onheader)
+}
+
+util.inherits(Extract, Writable)
+
+Extract.prototype.destroy = function (err) {
+ if (this._destroyed) return
+ this._destroyed = true
+
+ if (err) this.emit('error', err)
+ this.emit('close')
+ if (this._stream) this._stream.emit('close')
+}
+
+Extract.prototype._parse = function (size, onparse) {
+ if (this._destroyed) return
+ this._offset += size
+ this._missing = size
+ this._onparse = onparse
+}
+
+Extract.prototype._continue = function () {
+ if (this._destroyed) return
+ var cb = this._cb
+ this._cb = noop
+ if (this._overflow) this._write(this._overflow, undefined, cb)
+ else cb()
+}
+
+Extract.prototype._write = function (data, enc, cb) {
+ if (this._destroyed) return
+
+ var s = this._stream
+ var b = this._buffer
+ var missing = this._missing
+
+ // we do not reach end-of-chunk now. just forward it
+
+ if (data.length < missing) {
+ this._missing -= data.length
+ this._overflow = null
+ if (s) return s.write(data, cb)
+ b.append(data)
+ return cb()
+ }
+
+ // end-of-chunk. the parser should call cb.
+
+ this._cb = cb
+ this._missing = 0
+
+ var overflow = null
+ if (data.length > missing) {
+ overflow = data.slice(missing)
+ data = data.slice(0, missing)
+ }
+
+ if (s) s.end(data)
+ else b.append(data)
+
+ this._overflow = overflow
+ this._onparse()
+}
+
+module.exports = Extract
diff --git a/headers.js b/headers.js
index 5ea4333..8c75edc 100644
--- a/headers.js
+++ b/headers.js
@@ -1,218 +1,281 @@
-var ZEROS = '0000000000000000000';
-var ZERO_OFFSET = '0'.charCodeAt(0);
-var USTAR = 'ustar\x0000';
-
-var clamp = function(index, len, defaultValue) {
- if (typeof index !== 'number') return defaultValue;
- index = ~~index; // Coerce to integer.
- if (index >= len) return len;
- if (index >= 0) return index;
- index += len;
- if (index >= 0) return index;
- return 0;
-};
-
-var toType = function(flag) {
- switch (flag) {
- case 0:
- return 'file';
- case 1:
- return 'link';
- case 2:
- return 'symlink';
- case 3:
- return 'character-device';
- case 4:
- return 'block-device';
- case 5:
- return 'directory';
- case 6:
- return 'fifo';
- case 7:
- return 'contiguous-file';
- case 72:
- return 'pax-header';
- case 55:
- return 'pax-global-header'
- }
-
- return null;
-};
-
-var toTypeflag = function(flag) {
- switch (flag) {
- case 'file':
- return 0;
- case 'link':
- return 1;
- case 'symlink':
- return 2;
- case 'character-device':
- return 3;
- case 'block-device':
- return 4;
- case 'directory':
- return 5;
- case 'fifo':
- return 6;
- case 'contiguous-file':
- return 7;
- case 'pax-header':
- return 72;
- }
-
- return 0;
-};
-
-var alloc = function(size) {
- var buf = new Buffer(size);
- buf.fill(0);
- return buf;
-};
-
-var indexOf = function(block, num, offset, end) {
- for (; offset < end; offset++) {
- if (block[offset] === num) return offset;
- }
- return end;
-};
-
-var cksum = function(block) {
- var sum = 8 * 32;
- for (var i = 0; i < 148; i++) sum += block[i];
- for (var i = 156; i < 512; i++) sum += block[i];
- return sum;
-};
-
-var encodeOct = function(val, n) {
- val = val.toString(8);
- return ZEROS.slice(0, n-val.length)+val+' ';
-};
-
-var decodeOct = function(val, offset) {
- return parseInt(val.slice(offset, clamp(indexOf(val, 32, offset, val.length), val.length, val.length)).toString(), 8);
-};
-
-var decodeStr = function(val, offset, length) {
- return val.slice(offset, indexOf(val, 0, offset, offset+length)).toString();
-};
-
-var addLength = function(str) {
- var len = Buffer.byteLength(str);
- var digits = Math.floor(Math.log(len) / Math.log(10)) + 1;
- if (len + digits > Math.pow(10, digits)) digits++;
-
- return (len+digits)+str;
-};
-
-exports.encodePax = function(opts) { // TODO: encode more stuff in pax
- var result = '';
- if (opts.name) result += addLength(' path='+opts.name+'\n');
- if (opts.linkname) result += addLength(' linkpath='+opts.linkname+'\n');
- return new Buffer(result);
-};
-
-exports.decodePax = function(buf) {
- var result = {};
-
- while (buf.length) {
- var i = 0;
- for (; i < buf.length && buf[i] !== 32; i++);
- var len = parseInt(buf.slice(0, i).toString());
- if (!len) return result;
-
- var b = buf.slice(i+1, len-1).toString();
- var keyIndex = b.indexOf('=');
- if (keyIndex === -1) return result;
- result[b.slice(0, keyIndex)] = b.slice(keyIndex+1);
-
- buf = buf.slice(len);
- }
-
- return result;
-};
-
-exports.encode = function(opts) {
- var buf = alloc(512);
- var name = opts.name;
- var prefix = '';
-
- if (opts.typeflag === 5 && name[name.length-1] !== '/') name += '/';
- if (Buffer.byteLength(name) !== name.length) return null; // utf-8
-
- while (Buffer.byteLength(name) > 100) {
- var i = name.indexOf('/');
- if (i === -1) return null;
- prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i);
- name = name.slice(i+1);
- }
-
- if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null;
- if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null;
-
- buf.write(name);
- buf.write(encodeOct(opts.mode & 07777, 6), 100);
- buf.write(encodeOct(opts.uid, 6), 108);
- buf.write(encodeOct(opts.gid, 6), 116);
- buf.write(encodeOct(opts.size, 11), 124);
- buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136);
-
- buf[156] = ZERO_OFFSET + toTypeflag(opts.type);
-
- if (opts.linkname) buf.write(opts.linkname, 157);
-
- buf.write(USTAR, 257);
- if (opts.uname) buf.write(opts.uname, 265);
- if (opts.gname) buf.write(opts.gname, 297);
- buf.write(encodeOct(opts.devmajor || 0, 6), 329);
- buf.write(encodeOct(opts.devminor || 0, 6), 337);
-
- if (prefix) buf.write(prefix, 345);
-
- buf.write(encodeOct(cksum(buf), 6), 148);
-
- return buf;
-};
-
-exports.decode = function(buf) {
- var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET;
- var type = toType(typeflag);
-
- var name = decodeStr(buf, 0, 100);
- var mode = decodeOct(buf, 100);
- var uid = decodeOct(buf, 108);
- var gid = decodeOct(buf, 116);
- var size = decodeOct(buf, 124);
- var mtime = decodeOct(buf, 136);
- var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100);
- var uname = decodeStr(buf, 265, 32);
- var gname = decodeStr(buf, 297, 32);
- var devmajor = decodeOct(buf, 329);
- var devminor = decodeOct(buf, 337);
-
- if (buf[345]) name = decodeStr(buf, 345, 155)+'/'+name;
-
- var c = cksum(buf)
-
- //checksum is still initial value if header was null.
- if (c === 8*32) return null;
-
- //valid checksum
- if (c !== decodeOct(buf, 148)) throw new Error('invalid header');
-
- return {
- name: name,
- mode: mode,
- uid: uid,
- gid: gid,
- size: size,
- mtime: new Date(1000 * mtime),
- type: toType(typeflag),
- linkname: linkname,
- uname: uname,
- gname: gname,
- devmajor: devmajor,
- devminor: devminor
- };
-};
-
+var ZEROS = '0000000000000000000'
+var ZERO_OFFSET = '0'.charCodeAt(0)
+var USTAR = 'ustar\x0000'
+var MASK = parseInt('7777', 8)
+
+var clamp = function (index, len, defaultValue) {
+ if (typeof index !== 'number') return defaultValue
+ index = ~~index // Coerce to integer.
+ if (index >= len) return len
+ if (index >= 0) return index
+ index += len
+ if (index >= 0) return index
+ return 0
+}
+
+var toType = function (flag) {
+ switch (flag) {
+ case 0:
+ return 'file'
+ case 1:
+ return 'link'
+ case 2:
+ return 'symlink'
+ case 3:
+ return 'character-device'
+ case 4:
+ return 'block-device'
+ case 5:
+ return 'directory'
+ case 6:
+ return 'fifo'
+ case 7:
+ return 'contiguous-file'
+ case 72:
+ return 'pax-header'
+ case 55:
+ return 'pax-global-header'
+ case 27:
+ return 'gnu-long-link-path'
+ case 28:
+ case 30:
+ return 'gnu-long-path'
+ }
+
+ return null
+}
+
+var toTypeflag = function (flag) {
+ switch (flag) {
+ case 'file':
+ return 0
+ case 'link':
+ return 1
+ case 'symlink':
+ return 2
+ case 'character-device':
+ return 3
+ case 'block-device':
+ return 4
+ case 'directory':
+ return 5
+ case 'fifo':
+ return 6
+ case 'contiguous-file':
+ return 7
+ case 'pax-header':
+ return 72
+ }
+
+ return 0
+}
+
+var alloc = function (size) {
+ var buf = new Buffer(size)
+ buf.fill(0)
+ return buf
+}
+
+var indexOf = function (block, num, offset, end) {
+ for (; offset < end; offset++) {
+ if (block[offset] === num) return offset
+ }
+ return end
+}
+
+var cksum = function (block) {
+ var sum = 8 * 32
+ for (var i = 0; i < 148; i++) sum += block[i]
+ for (var j = 156; j < 512; j++) sum += block[j]
+ return sum
+}
+
+var encodeOct = function (val, n) {
+ val = val.toString(8)
+ return ZEROS.slice(0, n - val.length) + val + ' '
+}
+
+/* Copied from the node-tar repo and modified to meet
+ * tar-stream coding standard.
+ *
+ * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
+ */
+function parse256 (buf) {
+ // first byte MUST be either 80 or FF
+ // 80 for positive, FF for 2's comp
+ var positive
+ if (buf[0] === 0x80) positive = true
+ else if (buf[0] === 0xFF) positive = false
+ else return null
+
+ // build up a base-256 tuple from the least sig to the highest
+ var zero = false
+ var tuple = []
+ for (var i = buf.length - 1; i > 0; i--) {
+ var byte = buf[i]
+ if (positive) tuple.push(byte)
+ else if (zero && byte === 0) tuple.push(0)
+ else if (zero) {
+ zero = false
+ tuple.push(0x100 - byte)
+ } else tuple.push(0xFF - byte)
+ }
+
+ var sum = 0
+ var l = tuple.length
+ for (i = 0; i < l; i++) {
+ sum += tuple[i] * Math.pow(256, i)
+ }
+
+ return positive ? sum : -1 * sum
+}
+
+var decodeOct = function (val, offset) {
+ // If prefixed with 0x80 then parse as a base-256 integer
+ if (val[offset] & 0x80) {
+ return parse256(val.slice(offset, offset + 8))
+ } else {
+ // Older versions of tar can prefix with spaces
+ while (offset < val.length && val[offset] === 32) offset++
+ var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
+ while (offset < end && val[offset] === 0) offset++
+ if (end === offset) return 0
+ return parseInt(val.slice(offset, end).toString(), 8)
+ }
+}
+
+var decodeStr = function (val, offset, length) {
+ return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString()
+}
+
+var addLength = function (str) {
+ var len = Buffer.byteLength(str)
+ var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
+ if (len + digits > Math.pow(10, digits)) digits++
+
+ return (len + digits) + str
+}
+
+exports.decodeLongPath = function (buf) {
+ return decodeStr(buf, 0, buf.length)
+}
+
+exports.encodePax = function (opts) { // TODO: encode more stuff in pax
+ var result = ''
+ if (opts.name) result += addLength(' path=' + opts.name + '\n')
+ if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
+ var pax = opts.pax
+ if (pax) {
+ for (var key in pax) {
+ result += addLength(' ' + key + '=' + pax[key] + '\n')
+ }
+ }
+ return new Buffer(result)
+}
+
+exports.decodePax = function (buf) {
+ var result = {}
+
+ while (buf.length) {
+ var i = 0
+ while (i < buf.length && buf[i] !== 32) i++
+ var len = parseInt(buf.slice(0, i).toString(), 10)
+ if (!len) return result
+
+ var b = buf.slice(i + 1, len - 1).toString()
+ var keyIndex = b.indexOf('=')
+ if (keyIndex === -1) return result
+ result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
+
+ buf = buf.slice(len)
+ }
+
+ return result
+}
+
+exports.encode = function (opts) {
+ var buf = alloc(512)
+ var name = opts.name
+ var prefix = ''
+
+ if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
+ if (Buffer.byteLength(name) !== name.length) return null // utf-8
+
+ while (Buffer.byteLength(name) > 100) {
+ var i = name.indexOf('/')
+ if (i === -1) return null
+ prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
+ name = name.slice(i + 1)
+ }
+
+ if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
+ if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
+
+ buf.write(name)
+ buf.write(encodeOct(opts.mode & MASK, 6), 100)
+ buf.write(encodeOct(opts.uid, 6), 108)
+ buf.write(encodeOct(opts.gid, 6), 116)
+ buf.write(encodeOct(opts.size, 11), 124)
+ buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
+
+ buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
+
+ if (opts.linkname) buf.write(opts.linkname, 157)
+
+ buf.write(USTAR, 257)
+ if (opts.uname) buf.write(opts.uname, 265)
+ if (opts.gname) buf.write(opts.gname, 297)
+ buf.write(encodeOct(opts.devmajor || 0, 6), 329)
+ buf.write(encodeOct(opts.devminor || 0, 6), 337)
+
+ if (prefix) buf.write(prefix, 345)
+
+ buf.write(encodeOct(cksum(buf), 6), 148)
+
+ return buf
+}
+
+exports.decode = function (buf) {
+ var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
+
+ var name = decodeStr(buf, 0, 100)
+ var mode = decodeOct(buf, 100)
+ var uid = decodeOct(buf, 108)
+ var gid = decodeOct(buf, 116)
+ var size = decodeOct(buf, 124)
+ var mtime = decodeOct(buf, 136)
+ var type = toType(typeflag)
+ var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100)
+ var uname = decodeStr(buf, 265, 32)
+ var gname = decodeStr(buf, 297, 32)
+ var devmajor = decodeOct(buf, 329)
+ var devminor = decodeOct(buf, 337)
+
+ if (buf[345]) name = decodeStr(buf, 345, 155) + '/' + name
+
+ // to support old tar versions that use trailing / to indicate dirs
+ if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
+
+ var c = cksum(buf)
+
+ // checksum is still initial value if header was null.
+ if (c === 8 * 32) return null
+
+ // valid checksum
+ if (c !== decodeOct(buf, 148)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
+
+ return {
+ name: name,
+ mode: mode,
+ uid: uid,
+ gid: gid,
+ size: size,
+ mtime: new Date(1000 * mtime),
+ type: type,
+ linkname: linkname,
+ uname: uname,
+ gname: gname,
+ devmajor: devmajor,
+ devminor: devminor
+ }
+}
diff --git a/index.js b/index.js
index dbd60ed..6481704 100644
--- a/index.js
+++ b/index.js
@@ -1,2 +1,2 @@
-exports.extract = require('./extract');
-exports.pack = require('./pack');
\ No newline at end of file
+exports.extract = require('./extract')
+exports.pack = require('./pack')
diff --git a/pack.js b/pack.js
index df5f407..025f007 100644
--- a/pack.js
+++ b/pack.js
@@ -1,194 +1,254 @@
-var util = require('util');
-var eos = require('end-of-stream');
-var headers = require('./headers');
-
-var Readable = require('readable-stream').Readable;
-var Writable = require('readable-stream').Writable;
-var PassThrough = require('readable-stream').PassThrough;
-
-var END_OF_TAR = new Buffer(1024);
-END_OF_TAR.fill(0);
-
-var noop = function() {};
-
-var overflow = function(self, size) {
- size &= 511;
- if (size) self.push(END_OF_TAR.slice(0, 512 - size));
-};
-
-var Sink = function(to) {
- Writable.call(this);
- this.written = 0;
- this._to = to;
- this._destroyed = false;
-};
-
-util.inherits(Sink, Writable);
-
-Sink.prototype._write = function(data, enc, cb) {
- this.written += data.length;
- if (this._to.push(data)) return cb();
- this._to._drain = cb;
-};
-
-Sink.prototype.destroy = function() {
- if (this._destroyed) return;
- this._destroyed = true;
- this.emit('close');
-};
-
-var Void = function() {
- Writable.call(this)
- this._destroyed = false;
-};
-
-util.inherits(Void, Writable);
-
-Void.prototype._write = function(data, enc, cb) {
- cb(new Error('No body allowed for this entry'))
-};
-
-Void.prototype.destroy = function() {
- if (this._destroyed) return;
- this._destroyed = true;
- this.emit('close')
-}
-
-var Pack = function(opts) {
- if (!(this instanceof Pack)) return new Pack(opts);
- Readable.call(this, opts);
-
- this._drain = noop;
- this._finalized = false;
- this._finalizing = false;
- this._destroyed = false;
- this._stream = null;
-};
-
-util.inherits(Pack, Readable);
-
-Pack.prototype.entry = function(header, buffer, callback) {
- if (this._stream) throw new Error('already piping an entry');
- if (this._finalized || this._destroyed) return;
-
- if (typeof buffer === 'function') {
- callback = buffer;
- buffer = null;
- }
-
- if (!callback) callback = noop;
-
- var self = this;
-
- if (!header.size) header.size = 0;
- if (!header.type) header.type = 'file';
- if (!header.mode) header.mode = header.type === 'directory' ? 0755 : 0644;
- if (!header.uid) header.uid = 0;
- if (!header.gid) header.gid = 0;
- if (!header.mtime) header.mtime = new Date();
-
- if (typeof buffer === 'string') buffer = new Buffer(buffer);
- if (Buffer.isBuffer(buffer)) {
- header.size = buffer.length;
- this._encode(header);
- this.push(buffer);
- overflow(self, header.size);
- process.nextTick(callback);
- return new Void();
- }
- if (header.type !== 'file' && header.type !== 'contigious-file') {
- this._encode(header);
- process.nextTick(callback);
- return new Void();
- }
-
- var sink = new Sink(this);
-
- this._encode(header);
- this._stream = sink;
-
- eos(sink, function(err) {
- self._stream = null;
-
- if (err) { // stream was closed
- self.destroy();
- return callback(err);
- }
-
- if (sink.written !== header.size) { // corrupting tar
- self.destroy();
- return callback(new Error('size mismatch'));
- }
-
- overflow(self, header.size);
- if (self._finalizing) self.finalize();
- callback();
- });
-
- return sink;
-};
-
-Pack.prototype.finalize = function() {
- if (this._stream) {
- this._finalizing = true;
- return;
- }
-
- if (this._finalized) return;
- this._finalized = true;
- this.push(END_OF_TAR);
- this.push(null);
-};
-
-Pack.prototype.destroy = function(err) {
- if (this._destroyed) return;
- this._destroyed = true;
-
- if (err) this.emit('error', err);
- this.emit('close');
- if (this._stream && this._stream.destroy) this._stream.destroy();
-};
-
-Pack.prototype._encode = function(header) {
- var buf = headers.encode(header);
- if (buf) this.push(buf);
- else this._encodePax(header);
-};
-
-Pack.prototype._encodePax = function(header) {
- var paxHeader = headers.encodePax({
- name: header.name,
- linkname: header.linkname
- });
-
- var newHeader = {
- name: 'PaxHeader',
- mode: header.mode,
- uid: header.uid,
- gid: header.gid,
- size: paxHeader.length,
- mtime: header.mtime,
- type: 'pax-header',
- linkname: header.linkname && 'PaxHeader',
- uname: header.uname,
- gname: header.gname,
- devmajor: header.devmajor,
- devminor: header.devminor
- };
-
- this.push(headers.encode(newHeader));
- this.push(paxHeader);
- overflow(this, paxHeader.length);
-
- newHeader.size = header.size;
- newHeader.type = header.type;
- this.push(headers.encode(newHeader));
-};
-
-Pack.prototype._read = function(n) {
- var drain = this._drain;
- this._drain = noop;
- drain();
-};
-
-module.exports = Pack;
+var constants = require('constants')
+var eos = require('end-of-stream')
+var util = require('util')
+
+var Readable = require('readable-stream').Readable
+var Writable = require('readable-stream').Writable
+var StringDecoder = require('string_decoder').StringDecoder
+
+var headers = require('./headers')
+
+var DMODE = parseInt('755', 8)
+var FMODE = parseInt('644', 8)
+
+var END_OF_TAR = new Buffer(1024)
+END_OF_TAR.fill(0)
+
+var noop = function () {}
+
+var overflow = function (self, size) {
+ size &= 511
+ if (size) self.push(END_OF_TAR.slice(0, 512 - size))
+}
+
+function modeToType (mode) {
+ switch (mode & constants.S_IFMT) {
+ case constants.S_IFBLK: return 'block-device'
+ case constants.S_IFCHR: return 'character-device'
+ case constants.S_IFDIR: return 'directory'
+ case constants.S_IFIFO: return 'fifo'
+ case constants.S_IFLNK: return 'symlink'
+ }
+
+ return 'file'
+}
+
+var Sink = function (to) {
+ Writable.call(this)
+ this.written = 0
+ this._to = to
+ this._destroyed = false
+}
+
+util.inherits(Sink, Writable)
+
+Sink.prototype._write = function (data, enc, cb) {
+ this.written += data.length
+ if (this._to.push(data)) return cb()
+ this._to._drain = cb
+}
+
+Sink.prototype.destroy = function () {
+ if (this._destroyed) return
+ this._destroyed = true
+ this.emit('close')
+}
+
+var LinkSink = function () {
+ Writable.call(this)
+ this.linkname = ''
+ this._decoder = new StringDecoder('utf-8')
+ this._destroyed = false
+}
+
+util.inherits(LinkSink, Writable)
+
+LinkSink.prototype._write = function (data, enc, cb) {
+ this.linkname += this._decoder.write(data)
+ cb()
+}
+
+LinkSink.prototype.destroy = function () {
+ if (this._destroyed) return
+ this._destroyed = true
+ this.emit('close')
+}
+
+var Void = function () {
+ Writable.call(this)
+ this._destroyed = false
+}
+
+util.inherits(Void, Writable)
+
+Void.prototype._write = function (data, enc, cb) {
+ cb(new Error('No body allowed for this entry'))
+}
+
+Void.prototype.destroy = function () {
+ if (this._destroyed) return
+ this._destroyed = true
+ this.emit('close')
+}
+
+var Pack = function (opts) {
+ if (!(this instanceof Pack)) return new Pack(opts)
+ Readable.call(this, opts)
+
+ this._drain = noop
+ this._finalized = false
+ this._finalizing = false
+ this._destroyed = false
+ this._stream = null
+}
+
+util.inherits(Pack, Readable)
+
+Pack.prototype.entry = function (header, buffer, callback) {
+ if (this._stream) throw new Error('already piping an entry')
+ if (this._finalized || this._destroyed) return
+
+ if (typeof buffer === 'function') {
+ callback = buffer
+ buffer = null
+ }
+
+ if (!callback) callback = noop
+
+ var self = this
+
+ if (!header.size || header.type === 'symlink') header.size = 0
+ if (!header.type) header.type = modeToType(header.mode)
+ if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
+ if (!header.uid) header.uid = 0
+ if (!header.gid) header.gid = 0
+ if (!header.mtime) header.mtime = new Date()
+
+ if (typeof buffer === 'string') buffer = new Buffer(buffer)
+ if (Buffer.isBuffer(buffer)) {
+ header.size = buffer.length
+ this._encode(header)
+ this.push(buffer)
+ overflow(self, header.size)
+ process.nextTick(callback)
+ return new Void()
+ }
+
+ if (header.type === 'symlink' && !header.linkname) {
+ var linkSink = new LinkSink()
+ eos(linkSink, function (err) {
+ if (err) { // stream was closed
+ self.destroy()
+ return callback(err)
+ }
+
+ header.linkname = linkSink.linkname
+ self._encode(header)
+ callback()
+ })
+
+ return linkSink
+ }
+
+ this._encode(header)
+
+ if (header.type !== 'file' && header.type !== 'contiguous-file') {
+ process.nextTick(callback)
+ return new Void()
+ }
+
+ var sink = new Sink(this)
+
+ this._stream = sink
+
+ eos(sink, function (err) {
+ self._stream = null
+
+ if (err) { // stream was closed
+ self.destroy()
+ return callback(err)
+ }
+
+ if (sink.written !== header.size) { // corrupting tar
+ self.destroy()
+ return callback(new Error('size mismatch'))
+ }
+
+ overflow(self, header.size)
+ if (self._finalizing) self.finalize()
+ callback()
+ })
+
+ return sink
+}
+
+Pack.prototype.finalize = function () {
+ if (this._stream) {
+ this._finalizing = true
+ return
+ }
+
+ if (this._finalized) return
+ this._finalized = true
+ this.push(END_OF_TAR)
+ this.push(null)
+}
+
+Pack.prototype.destroy = function (err) {
+ if (this._destroyed) return
+ this._destroyed = true
+
+ if (err) this.emit('error', err)
+ this.emit('close')
+ if (this._stream && this._stream.destroy) this._stream.destroy()
+}
+
+Pack.prototype._encode = function (header) {
+ if (!header.pax) {
+ var buf = headers.encode(header)
+ if (buf) {
+ this.push(buf)
+ return
+ }
+ }
+ this._encodePax(header)
+}
+
+Pack.prototype._encodePax = function (header) {
+ var paxHeader = headers.encodePax({
+ name: header.name,
+ linkname: header.linkname,
+ pax: header.pax
+ })
+
+ var newHeader = {
+ name: 'PaxHeader',
+ mode: header.mode,
+ uid: header.uid,
+ gid: header.gid,
+ size: paxHeader.length,
+ mtime: header.mtime,
+ type: 'pax-header',
+ linkname: header.linkname && 'PaxHeader',
+ uname: header.uname,
+ gname: header.gname,
+ devmajor: header.devmajor,
+ devminor: header.devminor
+ }
+
+ this.push(headers.encode(newHeader))
+ this.push(paxHeader)
+ overflow(this, paxHeader.length)
+
+ newHeader.size = header.size
+ newHeader.type = header.type
+ this.push(headers.encode(newHeader))
+}
+
+Pack.prototype._read = function (n) {
+ var drain = this._drain
+ this._drain = noop
+ drain()
+}
+
+module.exports = Pack
diff --git a/package.json b/package.json
index c7c5a2c..8251874 100644
--- a/package.json
+++ b/package.json
@@ -1,24 +1,24 @@
{
"name": "tar-stream",
- "version": "0.4.3",
+ "version": "1.5.2",
"description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
- "repository": "git://github.com:mafintosh/tar-stream.git",
"author": "Mathias Buus <mathiasbuus at gmail.com>",
"engines": {
"node": ">= 0.8.0"
},
"dependencies": {
- "bl": "~0.8.1",
- "end-of-stream": "~0.1.3",
- "readable-stream": "~1.0.26-4",
- "xtend": "~3.0.0"
+ "bl": "^1.0.0",
+ "end-of-stream": "^1.0.0",
+ "readable-stream": "^2.0.0",
+ "xtend": "^4.0.0"
},
"devDependencies": {
- "tap": "~0.4.6",
- "concat-stream": "~1.2.1"
+ "concat-stream": "^1.4.6",
+ "standard": "^5.3.1",
+ "tape": "^3.0.3"
},
"scripts": {
- "test": "tap test/*.js"
+ "test": "standard && tape test/*.js"
},
"keywords": [
"tar",
@@ -35,5 +35,22 @@
"pack",
"extract",
"modify"
- ]
+ ],
+ "bugs": {
+ "url": "https://github.com/mafintosh/tar-stream/issues"
+ },
+ "homepage": "https://github.com/mafintosh/tar-stream",
+ "main": "index.js",
+ "files": [
+ "*.js",
+ "LICENSE"
+ ],
+ "directories": {
+ "test": "test"
+ },
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/mafintosh/tar-stream.git"
+ }
}
diff --git a/test/extract.js b/test/extract.js
index c1e4fd3..6c95d37 100644
--- a/test/extract.js
+++ b/test/extract.js
@@ -1,420 +1,505 @@
-var test = require('tap').test;
-var tar = require('../index');
-var fixtures = require('./fixtures');
-var concat = require('concat-stream');
-var fs = require('fs');
-
-var clamp = function(index, len, defaultValue) {
- if (typeof index !== 'number') return defaultValue;
- index = ~~index; // Coerce to integer.
- if (index >= len) return len;
- if (index >= 0) return index;
- index += len;
- if (index >= 0) return index;
- return 0;
-};
-
-test('one-file', function(t) {
- t.plan(3);
-
- var extract = tar.extract();
- var noEntries = false;
-
- extract.on('entry', function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'test.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 12,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'hello world\n');
- callback();
- }));
- });
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR));
-});
-
-test('chunked-one-file', function(t) {
- t.plan(3);
-
- var extract = tar.extract();
- var noEntries = false;
-
- extract.on('entry', function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'test.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 12,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'hello world\n');
- callback();
- }));
- });
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- var b = fs.readFileSync(fixtures.ONE_FILE_TAR);
-
- for (var i = 0; i < b.length; i += 321) {
- extract.write(b.slice(i, clamp(i+321, b.length, b.length)));
- }
- extract.end();
-});
-
-
-test('multi-file', function(t) {
- t.plan(5);
-
- var extract = tar.extract();
- var noEntries = false;
-
- var onfile1 = function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'file-1.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 12,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- extract.on('entry', onfile2);
- stream.pipe(concat(function(data) {
- t.same(data.toString(), 'i am file-1\n');
- callback();
- }));
- };
-
- var onfile2 = function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'file-2.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 12,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'i am file-2\n');
- callback();
- }));
- };
-
- extract.once('entry', onfile1);
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR));
-});
-
-test('chunked-multi-file', function(t) {
- t.plan(5);
-
- var extract = tar.extract();
- var noEntries = false;
-
- var onfile1 = function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'file-1.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 12,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- extract.on('entry', onfile2);
- stream.pipe(concat(function(data) {
- t.same(data.toString(), 'i am file-1\n');
- callback();
- }));
- };
-
- var onfile2 = function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'file-2.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 12,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'i am file-2\n');
- callback();
- }));
- };
-
- extract.once('entry', onfile1);
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- var b = fs.readFileSync(fixtures.MULTI_FILE_TAR);
- for (var i = 0; i < b.length; i += 321) {
- extract.write(b.slice(i, clamp(i+321, b.length, b.length)));
- }
- extract.end();
-});
-
-test('types', function(t) {
- t.plan(3);
-
- var extract = tar.extract();
- var noEntries = false;
-
- var ondir = function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'directory',
- mode: 0755,
- uid: 501,
- gid: 20,
- size: 0,
- mtime: new Date(1387580181000),
- type: 'directory',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
- stream.on('data', function() {
- t.ok(false);
- });
- extract.once('entry', onlink);
- callback();
- };
-
- var onlink = function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'directory-link',
- mode: 0755,
- uid: 501,
- gid: 20,
- size: 0,
- mtime: new Date(1387580181000),
- type: 'symlink',
- linkname: 'directory',
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
- stream.on('data', function() {
- t.ok(false);
- });
- noEntries = true;
- callback();
- };
-
- extract.once('entry', ondir);
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- extract.end(fs.readFileSync(fixtures.TYPES_TAR));
-});
-
-test('long-name', function(t) {
- t.plan(3);
-
- var extract = tar.extract();
- var noEntries = false;
-
- extract.on('entry', function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 16,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'hello long name\n');
- callback();
- }));
- });
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR));
-});
-
-test('unicode-bsd', function(t) { // can unpack a bsdtar unicoded tarball
- t.plan(3);
-
- var extract = tar.extract();
- var noEntries = false;
-
- extract.on('entry', function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'høllø.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 4,
- mtime: new Date(1387588646000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'hej\n');
- callback();
- }));
- });
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR));
-});
-
-test('unicode', function(t) { // can unpack a bsdtar unicoded tarball
- t.plan(3);
-
- var extract = tar.extract();
- var noEntries = false;
-
- extract.on('entry', function(header, stream, callback) {
- t.deepEqual(header, {
- name: 'høstål.txt',
- mode: 0644,
- uid: 501,
- gid: 20,
- size: 8,
- mtime: new Date(1387580181000),
- type: 'file',
- linkname: null,
- uname: 'maf',
- gname: 'staff',
- devmajor: 0,
- devminor: 0
- });
-
- stream.pipe(concat(function(data) {
- noEntries = true;
- t.same(data.toString(), 'høllø\n');
- callback();
- }));
- });
-
- extract.on('finish', function() {
- t.ok(noEntries);
- });
-
- extract.end(fs.readFileSync(fixtures.UNICODE_TAR));
-});
-
-test('name-is-100', function(t) {
- t.plan(3);
-
- var extract = tar.extract();
-
- extract.on('entry', function(header, stream, callback) {
- t.same(header.name.length, 100);
-
- stream.pipe(concat(function(data) {
- t.same(data.toString(), 'hello\n');
- callback();
- }));
- });
-
- extract.on('finish', function() {
- t.ok(true);
- });
-
- extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR));
-});
-
-test('invalid-file', function(t) {
- t.plan(1);
-
- var extract = tar.extract();
-
- extract.on('error', function(err) {
- t.ok(!!err);
- extract.destroy();
- });
-
- extract.end(fs.readFileSync(fixtures.INVALID_TGZ));
-});
\ No newline at end of file
+var test = require('tape')
+var tar = require('../index')
+var fixtures = require('./fixtures')
+var concat = require('concat-stream')
+var fs = require('fs')
+
+var clamp = function (index, len, defaultValue) {
+ if (typeof index !== 'number') return defaultValue
+ index = ~~index // Coerce to integer.
+ if (index >= len) return len
+ if (index >= 0) return index
+ index += len
+ if (index >= 0) return index
+ return 0
+}
+
+test('one-file', function (t) {
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ extract.on('entry', function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'test.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'hello world\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR))
+})
+
+test('chunked-one-file', function (t) {
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ extract.on('entry', function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'test.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'hello world\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ var b = fs.readFileSync(fixtures.ONE_FILE_TAR)
+
+ for (var i = 0; i < b.length; i += 321) {
+ extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
+ }
+ extract.end()
+})
+
+test('multi-file', function (t) {
+ t.plan(5)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ var onfile1 = function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'file-1.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ extract.on('entry', onfile2)
+ stream.pipe(concat(function (data) {
+ t.same(data.toString(), 'i am file-1\n')
+ callback()
+ }))
+ }
+
+ var onfile2 = function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'file-2.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'i am file-2\n')
+ callback()
+ }))
+ }
+
+ extract.once('entry', onfile1)
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR))
+})
+
+test('chunked-multi-file', function (t) {
+ t.plan(5)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ var onfile1 = function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'file-1.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ extract.on('entry', onfile2)
+ stream.pipe(concat(function (data) {
+ t.same(data.toString(), 'i am file-1\n')
+ callback()
+ }))
+ }
+
+ var onfile2 = function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'file-2.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'i am file-2\n')
+ callback()
+ }))
+ }
+
+ extract.once('entry', onfile1)
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ var b = fs.readFileSync(fixtures.MULTI_FILE_TAR)
+ for (var i = 0; i < b.length; i += 321) {
+ extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
+ }
+ extract.end()
+})
+
+test('pax', function (t) {
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ extract.on('entry', function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'pax.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 12,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0,
+ pax: { path: 'pax.txt', special: 'sauce' }
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'hello world\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.PAX_TAR))
+})
+
+test('types', function (t) {
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ var ondir = function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'directory',
+ mode: parseInt('755', 8),
+ uid: 501,
+ gid: 20,
+ size: 0,
+ mtime: new Date(1387580181000),
+ type: 'directory',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+ stream.on('data', function () {
+ t.ok(false)
+ })
+ extract.once('entry', onlink)
+ callback()
+ }
+
+ var onlink = function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'directory-link',
+ mode: parseInt('755', 8),
+ uid: 501,
+ gid: 20,
+ size: 0,
+ mtime: new Date(1387580181000),
+ type: 'symlink',
+ linkname: 'directory',
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+ stream.on('data', function () {
+ t.ok(false)
+ })
+ noEntries = true
+ callback()
+ }
+
+ extract.once('entry', ondir)
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.TYPES_TAR))
+})
+
+test('long-name', function (t) {
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ extract.on('entry', function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 16,
+ mtime: new Date(1387580181000),
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'hello long name\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR))
+})
+
+test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ extract.on('entry', function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'høllø.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 4,
+ mtime: new Date(1387588646000),
+ pax: {'SCHILY.dev': '16777217', 'SCHILY.ino': '3599143', 'SCHILY.nlink': '1', atime: '1387589077', ctime: '1387588646', path: 'høllø.txt'},
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'hej\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR))
+})
+
+test('unicode', function (t) { // can unpack a bsdtar unicoded tarball
+ t.plan(3)
+
+ var extract = tar.extract()
+ var noEntries = false
+
+ extract.on('entry', function (header, stream, callback) {
+ t.deepEqual(header, {
+ name: 'høstål.txt',
+ mode: parseInt('644', 8),
+ uid: 501,
+ gid: 20,
+ size: 8,
+ mtime: new Date(1387580181000),
+ pax: {path: 'høstål.txt'},
+ type: 'file',
+ linkname: null,
+ uname: 'maf',
+ gname: 'staff',
+ devmajor: 0,
+ devminor: 0
+ })
+
+ stream.pipe(concat(function (data) {
+ noEntries = true
+ t.same(data.toString(), 'høllø\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(noEntries)
+ })
+
+ extract.end(fs.readFileSync(fixtures.UNICODE_TAR))
+})
+
+test('name-is-100', function (t) {
+ t.plan(3)
+
+ var extract = tar.extract()
+
+ extract.on('entry', function (header, stream, callback) {
+ t.same(header.name.length, 100)
+
+ stream.pipe(concat(function (data) {
+ t.same(data.toString(), 'hello\n')
+ callback()
+ }))
+ })
+
+ extract.on('finish', function () {
+ t.ok(true)
+ })
+
+ extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR))
+})
+
+test('invalid-file', function (t) {
+ t.plan(1)
+
+ var extract = tar.extract()
+
+ extract.on('error', function (err) {
+ t.ok(!!err)
+ extract.destroy()
+ })
+
+ extract.end(fs.readFileSync(fixtures.INVALID_TGZ))
+})
+
+test('space prefixed', function (t) {
+ t.plan(5)
+
+ var extract = tar.extract()
+
+ extract.on('entry', function (header, stream, callback) {
+ t.ok(true)
+ callback()
+ })
+
+ extract.on('finish', function () {
+ t.ok(true)
+ })
+
+ extract.end(fs.readFileSync(fixtures.SPACE_TAR_GZ))
+})
+
+test('gnu long path', function (t) {
+ t.plan(2)
+
+ var extract = tar.extract()
+
+ extract.on('entry', function (header, stream, callback) {
+ t.ok(header.name.length > 100)
+ callback()
+ })
+
+ extract.on('finish', function () {
+ t.ok(true)
+ })
+
+ extract.end(fs.readFileSync(fixtures.GNU_LONG_PATH))
+})
+
+test('base 256 uid and gid', function (t) {
+ t.plan(2)
+ var extract = tar.extract()
+
+ extract.on('entry', function (header, stream, callback) {
+ t.ok(header.uid === 116435139)
+ t.ok(header.gid === 1876110778)
+ callback()
+ })
+
+ extract.end(fs.readFileSync(fixtures.BASE_256_UID_GID))
+})
diff --git a/test/fixtures/base-256-uid-gid.tar b/test/fixtures/base-256-uid-gid.tar
new file mode 100644
index 0000000..6e3d6e3
Binary files /dev/null and b/test/fixtures/base-256-uid-gid.tar differ
diff --git a/test/fixtures/gnu-long-path.tar b/test/fixtures/gnu-long-path.tar
new file mode 100644
index 0000000..efb204c
Binary files /dev/null and b/test/fixtures/gnu-long-path.tar differ
diff --git a/test/fixtures/index.js b/test/fixtures/index.js
index 59bc87b..eb18dbd 100644
--- a/test/fixtures/index.js
+++ b/test/fixtures/index.js
@@ -1,10 +1,14 @@
-var path = require('path');
+var path = require('path')
-exports.ONE_FILE_TAR = path.join(__dirname, 'one-file.tar');
-exports.MULTI_FILE_TAR = path.join(__dirname, 'multi-file.tar');
-exports.TYPES_TAR = path.join(__dirname, 'types.tar');
-exports.LONG_NAME_TAR = path.join(__dirname, 'long-name.tar');
-exports.UNICODE_BSD_TAR = path.join(__dirname, 'unicode-bsd.tar');
-exports.UNICODE_TAR = path.join(__dirname, 'unicode.tar');
-exports.NAME_IS_100_TAR = path.join(__dirname, 'name-is-100.tar');
-exports.INVALID_TGZ = path.join(__dirname, 'invalid.tgz');
\ No newline at end of file
+exports.ONE_FILE_TAR = path.join(__dirname, 'one-file.tar')
+exports.MULTI_FILE_TAR = path.join(__dirname, 'multi-file.tar')
+exports.PAX_TAR = path.join(__dirname, 'pax.tar')
+exports.TYPES_TAR = path.join(__dirname, 'types.tar')
+exports.LONG_NAME_TAR = path.join(__dirname, 'long-name.tar')
+exports.UNICODE_BSD_TAR = path.join(__dirname, 'unicode-bsd.tar')
+exports.UNICODE_TAR = path.join(__dirname, 'unicode.tar')
+exports.NAME_IS_100_TAR = path.join(__dirname, 'name-is-100.tar')
+exports.INVALID_TGZ = path.join(__dirname, 'invalid.tgz')
+exports.SPACE_TAR_GZ = path.join(__dirname, 'space.tar')
+exports.GNU_LONG_PATH = path.join(__dirname, 'gnu-long-path.tar')
+exports.BASE_256_UID_GID = path.join(__dirname, 'base-256-uid-gid.tar')
diff --git a/test/fixtures/pax.tar b/test/fixtures/pax.tar
new file mode 100644
index 0000000..f0d9d2f
Binary files /dev/null and b/test/fixtures/pax.tar differ
diff --git a/test/fixtures/space.tar b/test/fixtures/space.tar
new file mode 100644
index 0000000..0bd7cea
Binary files /dev/null and b/test/fixtures/space.tar differ
diff --git a/test/pack.js b/test/pack.js
index 69a7680..49f7a90 100644
--- a/test/pack.js
+++ b/test/pack.js
@@ -1,144 +1,169 @@
-var test = require('tap').test;
-var tar = require('../index');
-var fixtures = require('./fixtures');
-var concat = require('concat-stream');
-var fs = require('fs');
-
-test('one-file', function(t) {
- t.plan(2);
-
- var pack = tar.pack();
-
- pack.entry({
- name:'test.txt',
- mtime:new Date(1387580181000),
- mode:0644,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- }, 'hello world\n');
-
- pack.finalize();
-
- pack.pipe(concat(function(data) {
- t.same(data.length & 511, 0);
- t.deepEqual(data, fs.readFileSync(fixtures.ONE_FILE_TAR));
- }));
-});
-
-test('multi-file', function(t) {
- t.plan(2);
-
- var pack = tar.pack();
-
- pack.entry({
- name:'file-1.txt',
- mtime:new Date(1387580181000),
- mode:0644,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- }, 'i am file-1\n');
-
- pack.entry({
- name:'file-2.txt',
- mtime:new Date(1387580181000),
- mode:0644,
- size:12,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- }).end('i am file-2\n');
-
- pack.finalize();
-
- pack.pipe(concat(function(data) {
- t.same(data.length & 511, 0);
- t.deepEqual(data, fs.readFileSync(fixtures.MULTI_FILE_TAR));
- }));
-});
-
-test('types', function(t) {
- t.plan(2);
- var pack = tar.pack();
-
- pack.entry({
- name:'directory',
- mtime:new Date(1387580181000),
- type:'directory',
- mode:0755,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- });
-
- pack.entry({
- name:'directory-link',
- mtime:new Date(1387580181000),
- type:'symlink',
- linkname: 'directory',
- mode:0755,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- });
-
- pack.finalize();
-
- pack.pipe(concat(function(data) {
- t.equal(data.length & 511, 0);
- t.deepEqual(data, fs.readFileSync(fixtures.TYPES_TAR));
- }));
-
-});
-
-test('long-name', function(t) {
- t.plan(2);
- var pack = tar.pack();
-
- pack.entry({
- name:'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
- mtime:new Date(1387580181000),
- type:'file',
- mode:0644,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- }, 'hello long name\n');
-
- pack.finalize();
-
- pack.pipe(concat(function(data) {
- t.equal(data.length & 511, 0);
- t.deepEqual(data, fs.readFileSync(fixtures.LONG_NAME_TAR));
- }));
-});
-
-test('unicode', function(t) {
- t.plan(2);
- var pack = tar.pack();
-
- pack.entry({
- name:'høstål.txt',
- mtime:new Date(1387580181000),
- type:'file',
- mode:0644,
- uname:'maf',
- gname:'staff',
- uid:501,
- gid:20
- }, 'høllø\n');
-
- pack.finalize();
-
- pack.pipe(concat(function(data) {
- t.equal(data.length & 511, 0);
- t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR));
- }));
-});
\ No newline at end of file
+var test = require('tape')
+var tar = require('../index')
+var fixtures = require('./fixtures')
+var concat = require('concat-stream')
+var fs = require('fs')
+
+test('one-file', function (t) {
+ t.plan(2)
+
+ var pack = tar.pack()
+
+ pack.entry({
+ name: 'test.txt',
+ mtime: new Date(1387580181000),
+ mode: parseInt('644', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20
+ }, 'hello world\n')
+
+ pack.finalize()
+
+ pack.pipe(concat(function (data) {
+ t.same(data.length & 511, 0)
+ t.deepEqual(data, fs.readFileSync(fixtures.ONE_FILE_TAR))
+ }))
+})
+
+test('multi-file', function (t) {
+ t.plan(2)
+
+ var pack = tar.pack()
+
+ pack.entry({
+ name: 'file-1.txt',
+ mtime: new Date(1387580181000),
+ mode: parseInt('644', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20
+ }, 'i am file-1\n')
+
+ pack.entry({
+ name: 'file-2.txt',
+ mtime: new Date(1387580181000),
+ mode: parseInt('644', 8),
+ size: 12,
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20
+ }).end('i am file-2\n')
+
+ pack.finalize()
+
+ pack.pipe(concat(function (data) {
+ t.same(data.length & 511, 0)
+ t.deepEqual(data, fs.readFileSync(fixtures.MULTI_FILE_TAR))
+ }))
+})
+
+test('pax', function (t) {
+ t.plan(2)
+
+ var pack = tar.pack()
+
+ pack.entry({
+ name: 'pax.txt',
+ mtime: new Date(1387580181000),
+ mode: parseInt('644', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20,
+ pax: {special: 'sauce'}
+ }, 'hello world\n')
+
+ pack.finalize()
+
+ pack.pipe(concat(function (data) {
+ // fs.writeFileSync('tmp.tar', data)
+ t.same(data.length & 511, 0)
+ t.deepEqual(data, fs.readFileSync(fixtures.PAX_TAR))
+ }))
+})
+
+test('types', function (t) {
+ t.plan(2)
+ var pack = tar.pack()
+
+ pack.entry({
+ name: 'directory',
+ mtime: new Date(1387580181000),
+ type: 'directory',
+ mode: parseInt('755', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20
+ })
+
+ pack.entry({
+ name: 'directory-link',
+ mtime: new Date(1387580181000),
+ type: 'symlink',
+ linkname: 'directory',
+ mode: parseInt('755', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20,
+ size: 9 // Should convert to zero
+ })
+
+ pack.finalize()
+
+ pack.pipe(concat(function (data) {
+ t.equal(data.length & 511, 0)
+ t.deepEqual(data, fs.readFileSync(fixtures.TYPES_TAR))
+ }))
+})
+
+test('long-name', function (t) {
+ t.plan(2)
+ var pack = tar.pack()
+
+ pack.entry({
+ name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
+ mtime: new Date(1387580181000),
+ type: 'file',
+ mode: parseInt('644', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20
+ }, 'hello long name\n')
+
+ pack.finalize()
+
+ pack.pipe(concat(function (data) {
+ t.equal(data.length & 511, 0)
+ t.deepEqual(data, fs.readFileSync(fixtures.LONG_NAME_TAR))
+ }))
+})
+
+test('unicode', function (t) {
+ t.plan(2)
+ var pack = tar.pack()
+
+ pack.entry({
+ name: 'høstål.txt',
+ mtime: new Date(1387580181000),
+ type: 'file',
+ mode: parseInt('644', 8),
+ uname: 'maf',
+ gname: 'staff',
+ uid: 501,
+ gid: 20
+ }, 'høllø\n')
+
+ pack.finalize()
+
+ pack.pipe(concat(function (data) {
+ t.equal(data.length & 511, 0)
+ t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR))
+ }))
+})
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-tar-stream.git
More information about the Pkg-javascript-commits
mailing list