[Pkg-javascript-commits] [node-decompress-zip] 01/02: Imported Upstream version 0.1.0

Sebastiaan Couwenberg sebastic at moszumanska.debian.org
Sun Mar 1 15:42:12 UTC 2015


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository node-decompress-zip.

commit dd5ef8f3644cb40bd6dc0312d254448b9622afa8
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Sun Mar 1 15:39:45 2015 +0100

    Imported Upstream version 0.1.0
---
 .editorconfig           |  16 +++
 .gitattributes          |   1 +
 .gitignore              |   3 +
 .jshintrc               |  62 ++++++++++
 .travis.yml             |   5 +
 Gruntfile.js            |  62 ++++++++++
 README.md               |  73 +++++++++++
 bin/decompress-zip      |  83 +++++++++++++
 download-test-assets.js |  45 +++++++
 lib/decompress-zip.js   | 312 ++++++++++++++++++++++++++++++++++++++++++++++++
 lib/extractors.js       | 179 +++++++++++++++++++++++++++
 lib/file-details.js     |  37 ++++++
 lib/signatures.js       |  10 ++
 lib/structures.js       | 228 +++++++++++++++++++++++++++++++++++
 package.json            |  57 +++++++++
 test/test.js            | 181 ++++++++++++++++++++++++++++
 16 files changed, 1354 insertions(+)

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..6a804c2
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,16 @@
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[package.json]
+indent_style = space
+indent_size = 2
+
+[*.md]
+trim_trailing_whitespace = false
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..176a458
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+* text=auto
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d9301a2
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+/node_modules
+/npm-debug.log
+/test/assets
diff --git a/.jshintrc b/.jshintrc
new file mode 100644
index 0000000..2d3e748
--- /dev/null
+++ b/.jshintrc
@@ -0,0 +1,62 @@
+{
+    "predef": [
+        "console",
+        "describe",
+        "it",
+        "after",
+        "afterEach",
+        "before",
+        "beforeEach"
+    ],
+
+    "indent": 4,
+    "node": true,
+    "devel": true,
+
+    "bitwise": false,
+    "curly": false,
+    "eqeqeq": true,
+    "forin": false,
+    "immed": true,
+    "latedef": false,
+    "newcap": true,
+    "noarg": true,
+    "noempty": false,
+    "nonew": true,
+    "plusplus": false,
+    "regexp": false,
+    "undef": true,
+    "unused": "vars",
+    "quotmark": "single",
+    "strict": false,
+    "trailing": true,
+    "camelcase": true,
+
+    "asi": false,
+    "boss": true,
+    "debug": false,
+    "eqnull": true,
+    "es5": false,
+    "esnext": false,
+    "evil": false,
+    "expr": false,
+    "funcscope": false,
+    "globalstrict": false,
+    "iterator": false,
+    "lastsemic": false,
+    "laxbreak": true,
+    "laxcomma": false,
+    "loopfunc": true,
+    "multistr": false,
+    "onecase": true,
+    "regexdash": false,
+    "scripturl": false,
+    "smarttabs": false,
+    "shadow": false,
+    "sub": false,
+    "supernew": true,
+    "validthis": false,
+
+    "nomen": false,
+    "white": true
+}
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..ad2e6b9
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,5 @@
+language: node_js
+node_js:
+ - '0.10'
+before_script:
+ - grunt test-files
diff --git a/Gruntfile.js b/Gruntfile.js
new file mode 100644
index 0000000..56e7eea
--- /dev/null
+++ b/Gruntfile.js
@@ -0,0 +1,62 @@
+'use strict';
+module.exports = function (grunt) {
+    grunt.initConfig({
+        jshint: {
+            options: {
+                jshintrc: '.jshintrc'
+            },
+            files: [
+                'Gruntfile.js',
+                'bin/*',
+                'lib/**/*.js',
+                'test/*.js'
+            ]
+        },
+        simplemocha: {
+            options: {
+                reporter: 'spec',
+                timeout: '5000'
+            },
+            full: {
+                src: [
+                    'test/*.js'
+                ]
+            },
+            short: {
+                options: {
+                    reporter: 'dot'
+                },
+                src: [
+                    '<%= simplemocha.full.src %>'
+                ]
+            }
+        },
+        exec: {
+            coverage: {
+                command: 'node node_modules/istanbul/lib/cli.js cover --dir ./coverage node_modules/mocha/bin/_mocha -- -R dot test/*.js'
+            },
+            'test-files': {
+                command: 'node download-test-assets.js'
+            }
+        },
+        watch: {
+            files: [
+                '<%= jshint.files %>'
+            ],
+            tasks: [
+                'jshint',
+                'simplemocha:short'
+            ]
+        }
+    });
+
+    grunt.loadNpmTasks('grunt-contrib-jshint');
+    grunt.loadNpmTasks('grunt-contrib-watch');
+    grunt.loadNpmTasks('grunt-simple-mocha');
+    grunt.loadNpmTasks('grunt-exec');
+
+    grunt.registerTask('test', ['jshint', 'simplemocha:full']);
+    grunt.registerTask('coverage', 'exec:coverage');
+    grunt.registerTask('test-files', 'exec:test-files');
+    grunt.registerTask('default', 'test');
+};
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e447fd5
--- /dev/null
+++ b/README.md
@@ -0,0 +1,73 @@
+# decompress-zip [![Build Status](https://travis-ci.org/bower/decompress-zip.svg?branch=master)](https://travis-ci.org/bower/decompress-zip)
+
+> Extract files from a ZIP archive
+
+
+## Usage
+
+### .extract(options)
+
+Extracts the contents of the ZIP archive passed to the `DecompressZip` constructor.
+
+Returns an EventEmitter. It emits `error` on failure, `progress` as each file is processed, and `extract` when the extraction has completed. The value passed to the `extract` event is a basic log of each file and how it was compressed.
+
+**Options**
+- **path** *String* - Path to extract into (default `.`)
+- **follow** *Boolean* - If `true`, rather than creating stored symlinks as symlinks, make a shallow copy of the link target instead (default `false`)
+- **filter** *Function* - A function that will be called once for each file in the archive. It takes one argument which is an object containing details of the file. Return true for any file that you want to extract, and false otherwise. (default `null`)
+- **strip** *Number* - Remove leading folders in the path structure. Equivalent to `--strip-components` for tar.
+
+```js
+var DecompressZip = require('decompress-zip');
+var unzipper = new DecompressZip(filename);
+
+unzipper.on('error', function (err) {
+    console.log('Caught an error');
+});
+
+unzipper.on('extract', function (log) {
+    console.log('Finished extracting');
+});
+
+unzipper.on('progress', function (fileIndex, fileCount) {
+    console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
+});
+
+unzipper.extract({
+    path: 'some/path',
+    filter: function (file) {
+        return file.type !== "SymbolicLink";
+    }
+});
+```
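+
+For example, a minimal sketch combining the options above, stripping one leading folder and shallow-copying symlink targets instead of recreating the links (the archive name is just a placeholder):
+
+```js
+var DecompressZip = require('decompress-zip');
+var unzipper = new DecompressZip('archive.zip');
+
+unzipper.on('error', function (err) {
+    console.log('Caught an error');
+});
+
+unzipper.on('extract', function (log) {
+    console.log(log);
+});
+
+unzipper.extract({
+    path: 'some/path',
+    strip: 1,     // like tar --strip-components=1
+    follow: true  // copy the targets of stored symlinks
+});
+```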
+
+If `path` does not exist, decompress-zip will attempt to create it first.
+
+### .list()
+
+Much like extract, except:
+- the success event is `list`
+- the data for the event is an array of paths
+- no files are actually extracted
+- there are no options
+
+```js
+var DecompressZip = require('decompress-zip');
+var unzipper = new DecompressZip(filename);
+
+unzipper.on('error', function (err) {
+    console.log('Caught an error');
+});
+
+unzipper.on('list', function (files) {
+    console.log('The archive contains:');
+    console.log(files);
+});
+
+unzipper.list();
+```
+
+
+## License
+
+MIT © Bower team
diff --git a/bin/decompress-zip b/bin/decompress-zip
new file mode 100755
index 0000000..e64348b
--- /dev/null
+++ b/bin/decompress-zip
@@ -0,0 +1,83 @@
+#!/usr/bin/env node
+'use strict';
+var nopt = require('nopt');
+var path = require('path');
+var version = require('../package.json').version;
+
+var knownOptions = {
+    'list': Boolean,
+    'extract': Boolean,
+    'path': path
+};
+
+var shortcuts = {
+    'x': ['--extract'],
+    'l': ['--list'],
+    'p': ['--path'],
+    'v': ['--version']
+};
+
+var parsedOptions = nopt(knownOptions, shortcuts);
+
+var pad = function (string, length) {
+    string = String(string);
+
+    if (length <= string.length) {
+        return string;
+    }
+
+    return string + (new Array(length - string.length).join(' '));
+};
+
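+// Render `number` as a zero-padded octal string of `digits` characters;
+// for example, octal(parseInt('755', 8), 4) returns '0755'.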
+var octal = function (number, digits) {
+    var result = '';
+
+    for (var i = 0; i < digits; i++) {
+        result = (number & 0x07) + result;
+        number >>= 3;
+    }
+
+    return result;
+};
+
+var DecompressZip = require('../lib/decompress-zip');
+var zip = new DecompressZip(parsedOptions.argv.remain[0]);
+
+zip.on('file', function (file) {
+    console.log([octal(file.mode, 4), pad(file.type, 13), pad(file.compressedSize, 10), pad(file.uncompressedSize, 10), file.path].join(' '));
+});
+
+zip.on('list', function (fileList) {
+    //  console.log(fileList);
+});
+
+zip.on('extract', function (result) {
+    console.log(result);
+});
+
+zip.on('error', function (error) {
+    console.error(error.message, error.stack);
+});
+
+if (parsedOptions.version) {
+    console.log('version ' + version);
+} else if (parsedOptions.list) {
+    console.log('Mode Type         Zip size  Full size Path');
+    console.log('---- ----         --------  --------- ----');
+    zip.list();
+} else if (parsedOptions.extract) {
+    var options = {};
+
+    if (parsedOptions.path) {
+        options.path = parsedOptions.path;
+    }
+
+    zip.extract(options);
+} else {
+    console.log('Usage: decompress-zip <options> <file>');
+    console.log('    -x, --extract      extract the given file');
+    console.log('    -l, --list         list the contents of the given file');
+    console.log('    -v, --version      print the version number');
+    console.log('    -p, --path <path>  extract the file into <path>');
+    console.log('    -h, --help         show this message');
+}
diff --git a/download-test-assets.js b/download-test-assets.js
new file mode 100644
index 0000000..e4db1a7
--- /dev/null
+++ b/download-test-assets.js
@@ -0,0 +1,45 @@
+'use strict';
+var fs = require('fs');
+var request = require('request');
+var tmp = require('tmp');
+var exec = require('child_process').exec;
+var path = require('path');
+
+var url = 'https://drive.google.com/uc?id=0Bxxp2pVhWG1DTFNWQ1hsSkZKZmM&export=download';
+
+var errorHandler = function (err) {
+    throw err;
+};
+
+var extract = function (filename) {
+    exec('tar -xvzf ' + filename, {
+        cwd: path.join(__dirname, 'test'),
+        maxBuffer: 1024 * 1024
+    }, function (err, stdout, stderr) {
+        if (err) {
+            throw err;
+        }
+
+        console.log('Done');
+    });
+};
+
+tmp.file({
+    prefix: 'assets',
+    postfix: '.tgz'
+}, function (err, filename, fd) {
+    console.log('Downloading ' + url + ' to ' + filename);
+
+    var read = request(url);
+    var write = fs.createWriteStream(filename);
+
+    read.on('error', errorHandler);
+    write.on('error', errorHandler);
+
+    // For node 0.8 we can't just use the 'finish' event of the pipe
+    read.on('end', function () {
+        write.end(extract.bind(null, filename));
+    });
+
+    read.pipe(write, {end: false});
+});
diff --git a/lib/decompress-zip.js b/lib/decompress-zip.js
new file mode 100644
index 0000000..db0f8d1
--- /dev/null
+++ b/lib/decompress-zip.js
@@ -0,0 +1,312 @@
+'use strict';
+
+// The zip file spec is at http://www.pkware.com/documents/casestudies/APPNOTE.TXT
+// TODO: There is fair chunk of the spec that I have ignored. Need to add
+// assertions everywhere to make sure that we are not dealing with a ZIP type
+// that I haven't designed for. Things like spanning archives, non-DEFLATE
+// compression, encryption, etc.
+var fs = require('graceful-fs');
+var Q = require('q');
+var path = require('path');
+var util = require('util');
+var events = require('events');
+var structures = require('./structures');
+var signatures = require('./signatures');
+var extractors = require('./extractors');
+var FileDetails = require('./file-details');
+
+var fstat = Q.denodeify(fs.fstat);
+var read = Q.denodeify(fs.read);
+var fopen = Q.denodeify(fs.open);
+
+function DecompressZip(filename) {
+    events.EventEmitter.call(this);
+
+    this.filename = filename;
+    this.stats = null;
+    this.fd = null;
+    this.chunkSize = 1024 * 1024; // Buffer up to 1Mb at a time
+    this.dirCache = {};
+
+    // When we need a resource, we should check if there is a promise for it
+    // already and use that. If the promise is already fulfilled we don't do the
+    // async work again and we get to queue up dependent tasks.
+    this._p = {}; // _p instead of _promises because it is a lot easier to read
+}
+
+util.inherits(DecompressZip, events.EventEmitter);
+
+DecompressZip.prototype.openFile = function () {
+    return fopen(this.filename, 'r');
+};
+
+DecompressZip.prototype.closeFile = function () {
+    if (this.fd) {
+        fs.closeSync(this.fd);
+        this.fd = null;
+    }
+};
+
+DecompressZip.prototype.statFile = function (fd) {
+    this.fd = fd;
+    return fstat(fd);
+};
+
+DecompressZip.prototype.list = function () {
+    var self = this;
+
+    this.getFiles()
+    .then(function (files) {
+        var result = [];
+
+        files.forEach(function (file) {
+            result.push(file.path);
+        });
+
+        self.emit('list', result);
+    })
+    .fail(function (error) {
+        self.emit('error', error);
+    })
+    .fin(self.closeFile.bind(self));
+
+    return this;
+};
+
+DecompressZip.prototype.extract = function (options) {
+    var self = this;
+
+    options = options || {};
+    options.path = options.path || '.';
+    options.filter = options.filter || null;
+    options.follow = !!options.follow;
+    options.strip = +options.strip || 0;
+
+    this.getFiles()
+    .then(function (files) {
+        var copies = [];
+
+        if (options.filter) {
+            files = files.filter(options.filter);
+        }
+
+        if (options.follow) {
+            copies = files.filter(function (file) {
+                return file.type === 'SymbolicLink';
+            });
+            files = files.filter(function (file) {
+                return file.type !== 'SymbolicLink';
+            });
+        }
+
+        if (options.strip) {
+            files = files.map(function (file) {
+                if (file.type !== 'Directory') {
+                    // we split on `/` rather than `path.sep` because ZIP entry paths use `/`, even on Windows
+                    var dir = file.parent.split('/');
+                    var filename = file.filename;
+
+                    if (options.strip > dir.length) {
+                        throw new Error('You cannot strip more levels than there are directories');
+                    } else {
+                        dir = dir.slice(options.strip);
+                    }
+
+                    file.path = path.join(dir.join(path.sep), filename);
+                    return file;
+                }
+            });
+        }
+
+        return self.extractFiles(files, options)
+        .then(self.extractFiles.bind(self, copies, options));
+    })
+    .then(function (results) {
+        self.emit('extract', results);
+    })
+    .fail(function (error) {
+        self.emit('error', error);
+    })
+    .fin(self.closeFile.bind(self));
+
+    return this;
+};
+
+// Utility methods
+DecompressZip.prototype.getSearchBuffer = function (stats) {
+    var size = Math.min(stats.size, this.chunkSize);
+    this.stats = stats;
+    return this.getBuffer(stats.size - size, stats.size);
+};
+
+DecompressZip.prototype.getBuffer = function (start, end) {
+    var size = end - start;
+    return read(this.fd, new Buffer(size), 0, size, start)
+    .then(function (result) {
+        return result[1];
+    });
+};
+
+DecompressZip.prototype.findEndOfDirectory = function (buffer) {
+    var index = buffer.length - 3;
+    var chunk = '';
+
+    // Apparently the ZIP spec is not very good and it is impossible to
+    // guarantee that you have read a zip file correctly, or to determine
+    // the location of the CD without hunting.
+    // Search backwards through the buffer, as it is very likely to be near the
+    // end of the file.
+    while (index > Math.max(buffer.length - this.chunkSize, 0) && chunk !== signatures.END_OF_CENTRAL_DIRECTORY) {
+        index--;
+        chunk = buffer.readUInt32LE(index);
+    }
+
+    if (chunk !== signatures.END_OF_CENTRAL_DIRECTORY) {
+        throw new Error('Could not find the End of Central Directory Record');
+    }
+
+    return buffer.slice(index);
+};
+
+// Directory here means the ZIP Central Directory, not a folder
+DecompressZip.prototype.readDirectory = function (recordBuffer) {
+    var record = structures.readEndRecord(recordBuffer);
+
+    return this.getBuffer(record.directoryOffset, record.directoryOffset + record.directorySize)
+    .then(structures.readDirectory.bind(null));
+};
+
+DecompressZip.prototype.getFiles = function () {
+    if (!this._p.getFiles) {
+        this._p.getFiles = this.openFile()
+        .then(this.statFile.bind(this))
+        .then(this.getSearchBuffer.bind(this))
+        .then(this.findEndOfDirectory.bind(this))
+        .then(this.readDirectory.bind(this))
+        .then(this.readFileEntries.bind(this));
+    }
+
+    return this._p.getFiles;
+};
+
+DecompressZip.prototype.readFileEntries = function (directory) {
+    var promises = [];
+    var files = [];
+    var self = this;
+
+    directory.forEach(function (directoryEntry, index) {
+        var start = directoryEntry.relativeOffsetOfLocalHeader;
+        var end = Math.min(self.stats.size, start + structures.maxFileEntrySize);
+        var fileDetails = new FileDetails(directoryEntry);
+
+        var promise = self.getBuffer(start, end)
+        .then(structures.readFileEntry.bind(null))
+        .then(function (fileEntry) {
+            var maxSize;
+
+            if (fileDetails.compressedSize > 0) {
+                maxSize = fileDetails.compressedSize;
+            } else {
+                maxSize = self.stats.size;
+
+                if (index < directory.length - 1) {
+                    maxSize = directory[index + 1].relativeOffsetOfLocalHeader;
+                }
+
+                maxSize -= start + fileEntry.entryLength;
+            }
+
+            fileDetails._offset = start + fileEntry.entryLength;
+            fileDetails._maxSize = maxSize;
+
+            self.emit('file', fileDetails);
+            files[index] = fileDetails;
+        });
+
+        promises.push(promise);
+    });
+
+    return Q.all(promises)
+    .then(function () {
+        return files;
+    });
+};
+
+DecompressZip.prototype.extractFiles = function (files, options, results) {
+    var promises = [];
+    var self = this;
+
+    results = results || [];
+    var fileIndex = 0;
+    files.forEach(function (file) {
+        var promise = self.extractFile(file, options)
+        .then(function (result) {
+            self.emit('progress', fileIndex++, files.length);
+            results.push(result);
+        });
+
+        promises.push(promise);
+    });
+
+    return Q.all(promises)
+    .then(function () {
+        return results;
+    });
+};
+
+DecompressZip.prototype.extractFile = function (file, options) {
+    var destination = path.join(options.path, file.path);
+
+    // Possible compression methods:
+    //    0 - The file is stored (no compression)
+    //    1 - The file is Shrunk
+    //    2 - The file is Reduced with compression factor 1
+    //    3 - The file is Reduced with compression factor 2
+    //    4 - The file is Reduced with compression factor 3
+    //    5 - The file is Reduced with compression factor 4
+    //    6 - The file is Imploded
+    //    7 - Reserved for Tokenizing compression algorithm
+    //    8 - The file is Deflated
+    //    9 - Enhanced Deflating using Deflate64(tm)
+    //   10 - PKWARE Data Compression Library Imploding (old IBM TERSE)
+    //   11 - Reserved by PKWARE
+    //   12 - File is compressed using BZIP2 algorithm
+    //   13 - Reserved by PKWARE
+    //   14 - LZMA (EFS)
+    //   15 - Reserved by PKWARE
+    //   16 - Reserved by PKWARE
+    //   17 - Reserved by PKWARE
+    //   18 - File is compressed using IBM TERSE (new)
+    //   19 - IBM LZ77 z Architecture (PFS)
+    //   97 - WavPack compressed data
+    //   98 - PPMd version I, Rev 1
+
+    if (file.type === 'Directory') {
+        return extractors.folder(file, destination, this);
+    }
+
+    if (file.type === 'File') {
+        switch (file.compressionMethod) {
+        case 0:
+            return extractors.store(file, destination, this);
+
+        case 8:
+            return extractors.deflate(file, destination, this);
+
+        default:
+            throw new Error('Unsupported compression type');
+        }
+    }
+
+    if (file.type === 'SymbolicLink') {
+        if (options.follow) {
+            return extractors.copy(file, destination, this, options.path);
+        } else {
+            return extractors.symlink(file, destination, this, options.path);
+        }
+    }
+
+    throw new Error('Unsupported file type "' + file.type + '"');
+};
+
+module.exports = DecompressZip;
diff --git a/lib/extractors.js b/lib/extractors.js
new file mode 100644
index 0000000..32b0a5b
--- /dev/null
+++ b/lib/extractors.js
@@ -0,0 +1,179 @@
+var stream = require('stream');
+if (!stream.Readable) {
+    stream = require('readable-stream');
+}
+var fs = require('graceful-fs');
+var Q = require('q');
+var path = require('path');
+var zlib = require('zlib');
+var touch = Q.denodeify(require('touch'));
+var mkpath = Q.denodeify(require('mkpath'));
+var writeFile = Q.denodeify(fs.writeFile);
+var inflateRaw = Q.denodeify(zlib.inflateRaw);
+var symlink = Q.denodeify(fs.symlink);
+var stat = Q.denodeify(fs.stat);
+
+// Use a cache of promises for building the directory tree. This allows us to
+// correctly queue up file extractions for after their path has been created,
+// avoid trying to create the path twice and still be async.
+var mkdir = function (dir, cache) {
+    dir = path.normalize(path.resolve(process.cwd(), dir) + path.sep);
+
+    if (!cache[dir]) {
+        var parent;
+
+        if (fs.existsSync(dir)) {
+            parent = new Q();
+        } else {
+            parent = mkdir(path.dirname(dir), cache);
+        }
+
+        cache[dir] = parent.then(function () {
+            return mkpath(dir);
+        });
+    }
+
+    return cache[dir];
+};
+
+// Utility methods for writing output files
+var extractors = {
+    folder: function (folder, destination, zip) {
+        return mkdir(destination, zip.dirCache)
+        .then(function () {
+            return {folder: folder.path};
+        });
+    },
+    store: function (file, destination, zip) {
+        var writer;
+
+        if (file.uncompressedSize === 0) {
+            writer = touch.bind(null, destination);
+        } else if (file.uncompressedSize <= zip.chunkSize) {
+            writer = function () {
+                return zip.getBuffer(file._offset, file._offset + file.uncompressedSize)
+                .then(writeFile.bind(null, destination));
+            };
+        } else {
+            var input = new stream.Readable();
+            input.wrap(fs.createReadStream(zip.filename, {start: file._offset, end: file._offset + file.uncompressedSize - 1}));
+            writer = pipePromise.bind(null, input, destination);
+        }
+
+        return mkdir(path.dirname(destination), zip.dirCache)
+        .then(writer)
+        .then(function () {
+            return {stored: file.path};
+        });
+    },
+    deflate: function (file, destination, zip) {
+        // For Deflate you don't actually need to specify the end offset - and
+        // in fact many ZIP files don't include compressed file sizes for
+        // Deflated files so we don't even know what the end offset is.
+
+        return mkdir(path.dirname(destination), zip.dirCache)
+        .then(function () {
+            if (file._maxSize <= zip.chunkSize) {
+                return zip.getBuffer(file._offset, file._offset + file._maxSize)
+                .then(inflateRaw)
+                .then(function (buffer) {
+                    return writeFile(destination, buffer);
+                });
+            } else {
+                // For node 0.8 we need to create the Zlib stream and attach
+                // handlers in the same tick of the event loop, which is why we do
+                // the creation in here
+                var input = new stream.Readable();
+                input.wrap(fs.createReadStream(zip.filename, {start: file._offset}));
+                var inflater = input.pipe(zlib.createInflateRaw({highWaterMark: 32 * 1024}));
+
+                return pipePromise(inflater, destination);
+            }
+        })
+        .then(function () {
+            return {deflated: file.path};
+        });
+    },
+    symlink: function (file, destination, zip, basePath) {
+        var parent = path.dirname(destination);
+        return mkdir(parent, zip.dirCache)
+        .then(function () {
+            return getLinkLocation(file, destination, zip, basePath);
+        })
+        .then(function (linkTo) {
+            return symlink(path.resolve(parent, linkTo), destination)
+            .then(function () {
+                return {symlink: file.path, linkTo: linkTo};
+            });
+        });
+    },
+    // Make a shallow copy of the file/directory this symlink points to instead
+    // of actually creating a link
+    copy: function (file, destination, zip, basePath) {
+        var type;
+        var parent = path.dirname(destination);
+
+        return mkdir(parent, zip.dirCache)
+        .then(function () {
+            return getLinkLocation(file, destination, zip, basePath);
+        })
+        .then(function (linkTo) {
+            return stat(path.resolve(parent, linkTo))
+            .then(function (stats) {
+                if (stats.isFile()) {
+                    type = 'File';
+                    var input = new stream.Readable();
+                    input.wrap(fs.createReadStream(path.resolve(parent, linkTo)));
+                    return pipePromise(input, destination);
+                } else if (stats.isDirectory()) {
+                    type = 'Directory';
+                    return mkdir(destination, zip.dirCache);
+                } else {
+                    throw new Error('Could not follow symlink to unknown file type');
+                }
+            })
+            .then(function () {
+                return {copy: file.path, original: linkTo, type: type};
+            });
+        });
+    }
+};
+
+var getLinkLocation = function (file, destination, zip, basePath) {
+    var parent = path.dirname(destination);
+    return zip.getBuffer(file._offset, file._offset + file.uncompressedSize)
+    .then(function (buffer) {
+        var linkTo = buffer.toString();
+        var fullLink = path.resolve(parent, linkTo);
+
+        if (path.relative(basePath, fullLink).slice(0, 2) === '..') {
+            throw new Error('Symlink links outside archive');
+        }
+
+        return linkTo;
+    });
+};
+
+var pipePromise = function (input, destination) {
+    var deferred = Q.defer();
+    var output = fs.createWriteStream(destination);
+    var errorHandler = function (error) {
+        deferred.reject(error);
+    };
+
+    input.on('error', errorHandler);
+    output.on('error', errorHandler);
+
+    // For node 0.8 we can't just use the 'finish' event of the pipe
+    input.on('end', function () {
+        output.end(function () {
+            deferred.resolve();
+        });
+    });
+
+    input.pipe(output, {end: false});
+
+    return deferred.promise;
+};
+
+module.exports = extractors;
diff --git a/lib/file-details.js b/lib/file-details.js
new file mode 100644
index 0000000..1f3ca68
--- /dev/null
+++ b/lib/file-details.js
@@ -0,0 +1,37 @@
+// Objects with this prototype are used as the public representation of a file
+var path = require('path');
+
+var FileDetails = function (directoryEntry) {
+    // TODO: Add 'extra field' support
+
+    this._offset = 0;
+    this._maxSize = 0;
+
+    this.parent = path.dirname(directoryEntry.fileName);
+    this.filename = path.basename(directoryEntry.fileName);
+    this.path = path.normalize(directoryEntry.fileName);
+
+    this.type = directoryEntry.fileAttributes.type;
+    this.mode = directoryEntry.fileAttributes.mode;
+    this.compressionMethod = directoryEntry.compressionMethod;
+    this.modified = directoryEntry.modifiedTime;
+    this.crc32 = directoryEntry.crc32;
+    this.compressedSize = directoryEntry.compressedSize;
+    this.uncompressedSize = directoryEntry.uncompressedSize;
+    this.comment = directoryEntry.fileComment;
+
+    this.flags = {
+        encrypted: directoryEntry.generalPurposeFlags.encrypted,
+        compressionFlag1: directoryEntry.generalPurposeFlags.compressionFlag1,
+        compressionFlag2: directoryEntry.generalPurposeFlags.compressionFlag2,
+        useDataDescriptor: directoryEntry.generalPurposeFlags.useDataDescriptor,
+        enhancedDeflating: directoryEntry.generalPurposeFlags.enhancedDeflating,
+        compressedPatched: directoryEntry.generalPurposeFlags.compressedPatched,
+        strongEncryption: directoryEntry.generalPurposeFlags.strongEncryption,
+        utf8: directoryEntry.generalPurposeFlags.utf8,
+        encryptedCD: directoryEntry.generalPurposeFlags.encryptedCD
+    };
+
+};
+
+module.exports = FileDetails;
diff --git a/lib/signatures.js b/lib/signatures.js
new file mode 100644
index 0000000..1d29941
--- /dev/null
+++ b/lib/signatures.js
@@ -0,0 +1,10 @@
+module.exports = {
+    LOCAL_FILE_HEADER: 0x04034b50,
+    DATA_DESCRIPTOR_RECORD: 0x08074b50,
+    ARCHIVE_EXTRA_DATA: 0x08064b50,
+    CENTRAL_FILE_HEADER: 0x02014b50,
+    HEADER: 0x05054b50,
+    ZIP64_END_OF_CENTRAL_DIRECTORY: 0x06064b50,
+    ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR: 0x07064b50,
+    END_OF_CENTRAL_DIRECTORY: 0x06054b50
+};
diff --git a/lib/structures.js b/lib/structures.js
new file mode 100644
index 0000000..3f8acaf
--- /dev/null
+++ b/lib/structures.js
@@ -0,0 +1,228 @@
+'use strict';
+var binary = require('binary');
+
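+// DOS dates pack the day, month and year (since 1980) into one 16-bit field,
+// and DOS times pack the second/2, minute and hour into another; for example,
+// dosDate 0x4621 with dosTime 0 decodes to 2015-01-01 00:00:00 local time.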
+var convertDateTime = function (dosDate, dosTime) {
+    var year = ((dosDate >> 9) & 0x7F) + 1980;
+    var month = (dosDate >> 5) & 0x0F;
+    var day = dosDate & 0x1F;
+
+    var hour = (dosTime >> 11);
+    var minute = (dosTime >> 5) & 0x3F;
+    var second = (dosTime & 0x1F) * 2;
+
+    var result = new Date(year, month - 1, day, hour, minute, second, 0);
+
+    return result;
+};
+
+var convertGeneralPurposeFlags = function (value) {
+    var bits = [];
+
+    for (var i = 0; i < 16; i++) {
+        bits[i] = (value >> i) & 1;
+    }
+
+    return {
+        encrypted: !!bits[0],
+        compressionFlag1: !!bits[1],
+        compressionFlag2: !!bits[2],
+        useDataDescriptor: !!bits[3],
+        enhancedDeflating: !!bits[4],
+        compressedPatched: !!bits[5],
+        strongEncryption: !!bits[6],
+        utf8: !!bits[11],
+        encryptedCD: !!bits[13]
+    };
+};
+
+var parseExternalFileAttributes = function (externalAttributes, platform) {
+    var types = {
+        // In theory, any of these could be set. Realistically, though, it will
+        // be regular, directory or symlink
+        1: 'NamedPipe',
+        2: 'Character',
+        4: 'Directory',
+        6: 'Block',
+        8: 'File',
+        10: 'SymbolicLink',
+        12: 'Socket'
+    };
+
+    switch (platform) {
+
+    case 3: // Unix
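+        // JavaScript shift counts are masked to 5 bits, so >> 60 behaves as
+        // >> 28 (the file type nibble) and >> 48 behaves as >> 16 (the
+        // permission bits in the top half of the attribute field).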
+        return {
+            platform: 'Unix',
+            type: types[(externalAttributes >> 60) & 0x0F],
+            mode: (externalAttributes >> 48) & 0xFFF
+        };
+
+    // case 0: // MSDOS
+    default:
+        if (platform !== 0) {
+            console.warn('Possibly unsupported ZIP platform type, ' + platform);
+        }
+
+        var attribs = {
+            A: (externalAttributes >> 5) & 0x01,
+            D: (externalAttributes >> 4) & 0x01,
+            V: (externalAttributes >> 3) & 0x01,
+            S: (externalAttributes >> 2) & 0x01,
+            H: (externalAttributes >> 1) & 0x01,
+            R: externalAttributes & 0x01
+        };
+
+        // With no better guidance we'll make the default permissions ugo+r
+        var mode = parseInt('0444', 8);
+
+        if (attribs.D) {
+            mode |= parseInt('0111', 8); // Set the execute bit
+        }
+
+        if (!attribs.R) {
+            mode |= parseInt('0222', 8); // Set the write bit
+        }
+
+        mode &= ~process.umask();
+
+        return {
+            platform: 'DOS',
+            type: attribs.D ? 'Directory' : 'File',
+            mode: mode
+        };
+    }
+};
+
+var readEndRecord = function (buffer) {
+    var data = binary.parse(buffer)
+    .word32lu('signature')
+    .word16lu('diskNumber')
+    .word16lu('directoryStartDisk')
+    .word16lu('directoryEntryCountDisk')
+    .word16lu('directoryEntryCount')
+    .word32lu('directorySize')
+    .word32lu('directoryOffset')
+    .word16lu('commentLength')
+    .buffer('comment', 'commentLength')
+    .vars;
+
+    data.comment = data.comment.toString();
+
+    return data;
+};
+
+var directorySort = function (a, b) {
+    return a.relativeOffsetOfLocalHeader - b.relativeOffsetOfLocalHeader;
+};
+
+var readDirectory = function (buffer) {
+    var directory = [];
+    var current;
+    var index = 0;
+
+    while (index < buffer.length) {
+        current = binary.parse(buffer.slice(index, index + 46))
+        .word32lu('signature')
+        .word8lu('creatorSpecVersion')
+        .word8lu('creatorPlatform')
+        .word8lu('requiredSpecVersion')
+        .word8lu('requiredPlatform')
+        .word16lu('generalPurposeBitFlag')
+        .word16lu('compressionMethod')
+        .word16lu('lastModFileTime')
+        .word16lu('lastModFileDate')
+        .word32lu('crc32')
+        .word32lu('compressedSize')
+        .word32lu('uncompressedSize')
+        .word16lu('fileNameLength')
+        .word16lu('extraFieldLength')
+        .word16lu('fileCommentLength')
+        .word16lu('diskNumberStart')
+        .word16lu('internalFileAttributes')
+        .word32lu('externalFileAttributes')
+        .word32lu('relativeOffsetOfLocalHeader')
+        .vars;
+
+        index += 46;
+
+        current.generalPurposeFlags = convertGeneralPurposeFlags(current.generalPurposeBitFlag);
+        current.fileAttributes = parseExternalFileAttributes(current.externalFileAttributes, current.creatorPlatform);
+
+        current.modifiedTime = convertDateTime(current.lastModFileDate, current.lastModFileTime);
+        current.fileName = current.extraField = current.fileComment = '';
+        current.headerLength = 46 + current.fileNameLength + current.extraFieldLength + current.fileCommentLength;
+
+        if (current.fileNameLength > 0) {
+            current.fileName = buffer.slice(index, index + current.fileNameLength).toString();
+            index += current.fileNameLength;
+        }
+
+        if (current.extraFieldLength > 0) {
+            current.extraField = buffer.slice(index, index + current.extraFieldLength).toString();
+            index += current.extraFieldLength;
+        }
+
+        if (current.fileCommentLength > 0) {
+            current.fileComment = buffer.slice(index, index + current.fileCommentLength).toString();
+            index += current.fileCommentLength;
+        }
+
+        if (current.fileAttributes.type !== 'Directory' && current.fileName.substr(-1) === '/') {
+            // TODO: check that this is a reasonable check
+            current.fileAttributes.type = 'Directory';
+        }
+
+        directory.push(current);
+    }
+
+    directory.sort(directorySort);
+
+    return directory;
+};
+
+var readFileEntry = function (buffer) {
+    var index = 0;
+
+    var fileEntry = binary.parse(buffer.slice(index, 30))
+    .word32lu('signature')
+    .word16lu('versionNeededToExtract')
+    .word16lu('generalPurposeBitFlag')
+    .word16lu('compressionMethod')
+    .word16lu('lastModFileTime')
+    .word16lu('lastModFileDate')
+    .word32lu('crc32')
+    .word32lu('compressedSize')
+    .word32lu('uncompressedSize')
+    .word16lu('fileNameLength')
+    .word16lu('extraFieldLength')
+    .vars;
+
+    index += 30;
+
+    fileEntry.fileName = fileEntry.extraField = '';
+
+    fileEntry.entryLength = 30 + fileEntry.fileNameLength + fileEntry.extraFieldLength;
+
+    if (fileEntry.entryLength > structures.maxFileEntrySize) {
+        throw new Error('File entry unexpectedly large: ' + fileEntry.entryLength + ' (max: ' + structures.maxFileEntrySize + ')');
+    }
+
+    if (fileEntry.fileNameLength > 0) {
+        fileEntry.fileName = buffer.slice(index, index + fileEntry.fileNameLength).toString();
+        index += fileEntry.fileNameLength;
+    }
+
+    if (fileEntry.extraFieldLength > 0) {
+        fileEntry.extraField = buffer.slice(index, index + fileEntry.extraFieldLength).toString();
+        index += fileEntry.extraFieldLength;
+    }
+
+    return fileEntry;
+};
+
+var structures = module.exports = {
+    readEndRecord: readEndRecord,
+    readDirectory: readDirectory,
+    readFileEntry: readFileEntry,
+    maxFileEntrySize: 4096
+};
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..98fa28e
--- /dev/null
+++ b/package.json
@@ -0,0 +1,57 @@
+{
+  "name": "decompress-zip",
+  "version": "0.1.0",
+  "description": "Extract files from a ZIP archive",
+  "main": "lib/decompress-zip.js",
+  "scripts": {
+    "test": "grunt test"
+  },
+  "bin": {
+    "decompress-zip": "bin/decompress-zip"
+  },
+  "repository": "bower/decompress-zip.git",
+  "engines": {
+    "node": ">=0.10.0"
+  },
+  "keywords": [
+    "zip",
+    "unzip",
+    "tar",
+    "untar",
+    "compress",
+    "decompress",
+    "archive",
+    "extract",
+    "zlib"
+  ],
+  "author": "Bower team",
+  "licenses": [
+    {
+      "type": "MIT",
+      "url": "https://github.com/bower/bower/blob/master/LICENSE"
+    }
+  ],
+  "dependencies": {
+    "binary": "^0.3.0",
+    "graceful-fs": "^3.0.0",
+    "mkpath": "^0.1.0",
+    "nopt": "^3.0.1",
+    "q": "^1.1.2",
+    "readable-stream": "^1.1.8",
+    "touch": "0.0.3"
+  },
+  "devDependencies": {
+    "chai": "^1.10.0",
+    "glob": "^4.3.2",
+    "grunt": "^0.4.1",
+    "grunt-cli": "^0.1.13",
+    "grunt-contrib-jshint": "^0.10.0",
+    "grunt-contrib-watch": "^0.6.1",
+    "grunt-exec": "^0.4.2",
+    "grunt-simple-mocha": "^0.4.0",
+    "istanbul": "^0.3.5",
+    "mocha": "^2.1.0",
+    "request": "^2.51.0",
+    "tmp": "0.0.24"
+  }
+}
diff --git a/test/test.js b/test/test.js
new file mode 100644
index 0000000..03c6d47
--- /dev/null
+++ b/test/test.js
@@ -0,0 +1,181 @@
+'use strict';
+var path = require('path');
+var exec = require('child_process').exec;
+var glob = require('glob');
+var assert = require('chai').assert;
+var tmp = require('tmp');
+var assetsPath = path.join(__dirname, 'assets');
+var DecompressZip = require('../lib/decompress-zip');
+
+var samples = glob.sync('*/archive.zip', {cwd: assetsPath});
+
+if (samples.length === 0) {
+    console.log('No sample ZIP files were found. Run "grunt test-files" to download them.');
+    process.exit(1);
+}
+
+describe('Smoke test', function () {
+    it('should find the public interface', function () {
+        assert.isFunction(DecompressZip, 'constructor is a function');
+        assert.isFunction(DecompressZip.prototype.list, 'decompress.list is a function');
+        assert.isFunction(DecompressZip.prototype.extract, 'decompress.extract is a function');
+    });
+});
+
+describe('Extract', function () {
+    describe('errors', function () {
+        var tmpDir;
+
+        before(function (done) {
+            tmp.dir({unsafeCleanup: true}, function (err, dir) {
+                if (err) {
+                    throw err;
+                }
+
+                tmpDir = dir;
+                done();
+            });
+        });
+
+        it('should emit an error when the file does not exist', function (done) {
+            var zip = new DecompressZip('/my/non/existent/file.zip');
+
+            zip.on('extract', function () {
+                assert(false, '"extract" event should not fire');
+                done();
+            });
+
+            zip.on('error', function (error) {
+                assert(true, '"error" event should fire');
+                done();
+            });
+
+            zip.extract({path: tmpDir});
+        });
+
+        it('should emit an error when stripping deeper than the path structure', function (done) {
+            var zip = new DecompressZip(path.join(assetsPath, samples[0]));
+
+            zip.on('extract', function () {
+                assert(false, '"extract" event should not fire');
+                done();
+            });
+
+            zip.on('error', function (error) {
+                assert(true, '"error" event should fire');
+                done();
+            });
+
+            zip.extract({path: tmpDir, strip: 3});
+        });
+
+        it('should emit a progress event on each file', function (done) {
+            var zip = new DecompressZip(path.join(assetsPath, samples[0]));
+            var numProgressEvents = 0;
+            var numTotalFiles = 921;
+
+            zip.on('progress', function (i, numFiles) {
+                assert.equal(numFiles, numTotalFiles, '"progress" event should include the correct number of files');
+                assert(typeof i === 'number', '"progress" event should include the number of the current file');
+                numProgressEvents++;
+            });
+
+            zip.on('extract', function () {
+                assert(true, '"extract" event should fire');
+                assert.equal(numProgressEvents, numTotalFiles, 'there should be a "progress" event for every file');
+                done();
+            });
+
+            zip.on('error', function (error) {
+                assert(false, '"error" event should not fire');
+                done();
+            });
+
+            zip.extract({path: tmpDir});
+        });
+    });
+
+    describe('directory creation', function () {
+        var tmpDir;
+        var rmdirSync;
+        before(function (done) {
+            tmp.dir({unsafeCleanup: true}, function (err, dir, cleanupCallback) {
+                if (err) {
+                    throw err;
+                }
+
+                tmpDir = dir;
+                rmdirSync = cleanupCallback;
+                done();
+            });
+        });
+
+        it('should create necessary directories, even on 2nd run', function (done) {
+            var zip = new DecompressZip(path.join(assetsPath, samples[0]));
+            zip.on('error', done);
+            zip.on('extract', function () {
+                rmdirSync(tmpDir);
+                var zip2 = new DecompressZip(path.join(assetsPath, samples[0]));
+                zip2.on('error', done);
+                zip2.on('extract', function () {
+                    done();
+                });
+                zip2.extract({path: tmpDir});
+            });
+
+            zip.extract({path: tmpDir});
+        });
+    });
+
+    samples.forEach(function (sample) {
+        var extracted = path.join(path.dirname(sample), 'extracted');
+
+        describe(sample, function () {
+            var tmpDir;
+
+            before(function (done) {
+                tmp.dir({unsafeCleanup: true}, function (err, dir) {
+                    if (err) {
+                        throw err;
+                    }
+
+                    tmpDir = dir;
+                    done();
+                });
+            });
+
+
+            it('should extract without any errors', function (done) {
+                this.timeout(60000);
+                var zip = new DecompressZip(path.join(assetsPath, sample));
+
+                zip.on('extract', function () {
+                    assert(true, 'success callback should be called');
+                    done();
+                });
+
+                zip.on('error', function () {
+                    assert(false, 'error callback should not be called');
+                    done();
+                });
+
+                zip.extract({path: tmpDir});
+            });
+
+            it('should have the same output files as expected', function (done) {
+                exec('diff -qr ' + extracted + ' ' + tmpDir, {cwd: assetsPath}, function (err, stdout, stderr) {
+                    if (err) {
+                        if (err.code === 1) {
+                            assert(false, 'output should match');
+                        } else {
+                            throw err;
+                        }
+                    }
+                    assert.equal(stdout, '');
+                    assert.equal(stderr, '');
+                    done();
+                });
+            });
+        });
+    });
+});

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-decompress-zip.git


