[Pkg-javascript-commits] [node-readdirp] 01/07: Imported Upstream version 0.2.4

Mike Gabriel sunweaver at debian.org
Thu Dec 15 10:15:52 UTC 2016


This is an automated email from the git hooks/post-receive script.

sunweaver pushed a commit to branch master
in repository node-readdirp.

commit 9f5aca7e433bbfaf2730e700a6b3886c7d4a4134
Author: Mike Gabriel <mike.gabriel at das-netzwerkteam.de>
Date:   Wed May 8 23:21:41 2013 +0200

    Imported Upstream version 0.2.4
---
 .gitignore                                         |  15 ++
 .travis.yml                                        |   5 +
 LICENSE                                            |  16 ++
 README.md                                          | 227 ++++++++++++++++++
 examples/Readme.md                                 |  37 +++
 examples/callback-api.js                           |  10 +
 examples/grep.js                                   |  73 ++++++
 examples/package.json                              |   9 +
 examples/stream-api-pipe.js                        |  13 +
 examples/stream-api.js                             |  15 ++
 package.json                                       |  38 +++
 readdirp.js                                        | 267 +++++++++++++++++++++
 stream-api.js                                      |  86 +++++++
 test/bed/root_dir1/root_dir1_file1.ext1            |   0
 test/bed/root_dir1/root_dir1_file2.ext2            |   0
 test/bed/root_dir1/root_dir1_file3.ext3            |   0
 .../root1_dir1_subdir1_file1.ext1                  |   0
 test/bed/root_dir1/root_dir1_subdir2/.gitignore    |   1 +
 test/bed/root_dir2/root_dir2_file1.ext1            |   0
 test/bed/root_dir2/root_dir2_file2.ext2            |   0
 test/bed/root_dir2/root_dir2_subdir1/.gitignore    |   1 +
 test/bed/root_dir2/root_dir2_subdir2/.gitignore    |   1 +
 test/bed/root_file1.ext1                           |   0
 test/bed/root_file2.ext2                           |   0
 test/bed/root_file3.ext3                           |   0
 test/readdirp-stream.js                            | 215 +++++++++++++++++
 test/readdirp.js                                   | 252 +++++++++++++++++++
 27 files changed, 1281 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7dccd97
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,15 @@
+lib-cov
+*.seed
+*.log
+*.csv
+*.dat
+*.out
+*.pid
+*.gz
+
+pids
+logs
+results
+
+node_modules
+npm-debug.log
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..84fd7ca
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,5 @@
+language: node_js
+node_js:
+  - 0.6
+  - 0.8
+  - 0.9
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..de78e27
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,16 @@
+lib-cov
+*.seed
+*.log
+*.csv
+*.dat
+*.out
+*.pid
+*.gz
+
+pids
+logs
+results
+
+node_modules
+npm-debug.log
+tmp
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7b36a8a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,227 @@
+# readdirp [![Build Status](https://secure.travis-ci.org/thlorenz/readdirp.png)](http://travis-ci.org/thlorenz/readdirp)
+
+Recursive version of [fs.readdir](http://nodejs.org/docs/latest/api/fs.html#fs_fs_readdir_path_callback). Exposes a **stream api**.
+
+```javascript
+var readdirp = require('readdirp')
+  , path = require('path')
+  , es = require('event-stream');
+
+// print out all JavaScript files along with their size
+
+var stream = readdirp({ root: path.join(__dirname), fileFilter: '*.js' });
+stream
+  .on('warn', function (err) { 
+    console.error('non-fatal error', err); 
+    // optionally call stream.destroy() here in order to abort and cause 'close' to be emitted
+  })
+  .on('error', function (err) { console.error('fatal error', err); })
+  .pipe(es.mapSync(function (entry) { 
+    return { path: entry.path, size: entry.stat.size };
+  }))
+  .pipe(es.stringify())
+  .pipe(process.stdout);
+```
+
+Meant to be one of the recursive versions of [fs](http://nodejs.org/docs/latest/api/fs.html) functions, similar in spirit to [mkdirp](https://github.com/substack/node-mkdirp).
+
+**Table of Contents**  *generated with [DocToc](http://doctoc.herokuapp.com/)*
+
+- [Installation](#installation)
+- [API](#api)
+	- [entry stream](#entry-stream)
+	- [options](#options)
+	- [entry info](#entry-info)
+	- [Filters](#filters)
+	- [Callback API](#callback-api)
+		- [allProcessed ](#allprocessed)
+		- [fileProcessed](#fileprocessed)
+- [More Examples](#more-examples)
+	- [stream api](#stream-api)
+	- [stream api pipe](#stream-api-pipe)
+	- [grep](#grep)
+	- [using callback api](#using-callback-api)
+	- [tests](#tests)
+
+
+# Installation
+
+    npm install readdirp
+
+# API
+
+***var entryStream = readdirp (options)***
+
+Reads the given root recursively and returns a `stream` of [entry infos](#entry-info).
+
+## entry stream
+
+Behaves as follows:
+  
+- `emit('data')` passes an [entry info](#entry-info) whenever one is found
+- `emit('warn')` passes a non-fatal `Error` that prevents a file/directory from being processed (e.g., if it is
+  inaccessible to the user)
+- `emit('error')` passes a fatal `Error` which also ends the stream (e.g., when illegal options were passed)
+- `emit('end')` called when all entries have been found and no more will be emitted (i.e., we are done)
+- `emit('close')` called when the stream is destroyed via `stream.destroy()` (which could be useful if you want to
+  manually abort even on a non-fatal error; see the sketch after this list) - at that point the stream is no longer
+  `readable` and no more entries, warnings or errors are emitted
+- the stream is `paused` initially in order to allow `pipe` and `on` handlers to be connected before data or errors are
+  emitted
+- the stream is `resumed` automatically during the next event loop tick
+- to learn more about streams, consult the [stream-handbook](https://github.com/substack/stream-handbook)
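+
+As a minimal sketch (the root path here is arbitrary), the stream can be aborted manually from within a `warn`
+handler; `close` fires afterwards and no further entries are emitted:
+
+```javascript
+var readdirp = require('readdirp');
+
+var stream = readdirp({ root: '.' });
+stream
+  .on('warn', function (err) {
+    console.error('aborting after the first non-fatal error', err);
+    stream.destroy(); // stop the walk; 'close' is emitted and the stream stops being readable
+  })
+  .on('close', function () { console.log('stream destroyed, no more entries will arrive'); })
+  .on('data', function (entry) { console.log(entry.path); });
+```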
+
+## options
+    
+- **root**: path in which to start reading and recursing into subdirectories
+
+- **fileFilter**: filter to include/exclude files found (see [Filters](#filters) for more)
+
+- **directoryFilter**: filter to include/exclude directories found and to recurse into (see [Filters](#filters) for more)
+
+- **depth**: depth at which to stop recursing even if more subdirectories are found
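+
+All options combined in one call might look like the following sketch (the glob, directory filter and depth value are
+chosen purely for illustration):
+
+```javascript
+var readdirp = require('readdirp');
+
+readdirp({
+    root: './test/bed'                 // start reading here
+  , fileFilter: '*.ext1'               // only report files matching this glob
+  , directoryFilter: [ '!.git' ]       // skip (and do not recurse into) '.git' directories
+  , depth: 2                           // report entries at most two levels below root
+})
+  .on('data', function (entry) { console.log(entry.path); });
+```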
+
+## entry info
+
+Has the following properties:
+
+- **parentDir**     :  directory in which entry was found (relative to given root)
+- **fullParentDir** :  full path to parent directory
+- **name**          :  name of the file/directory
+- **path**          :  path to the file/directory (relative to given root)
+- **fullPath**      :  full path to the file/directory found
+- **stat**          :  built-in [stat object](http://nodejs.org/docs/v0.4.9/api/fs.html#fs.Stats)
+- **Example**: (assuming root was `/User/dev/readdirp`)
+        
+        parentDir     :  'test/bed/root_dir1',
+        fullParentDir :  '/User/dev/readdirp/test/bed/root_dir1',
+        name          :  'root_dir1_subdir1',
+        path          :  'test/bed/root_dir1/root_dir1_subdir1',
+        fullPath      :  '/User/dev/readdirp/test/bed/root_dir1/root_dir1_subdir1',
+        stat          :  [ ... ]
+                    
+## Filters
+    
+There are three different ways to specify filters for files and directories respectively. 
+
+- **function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry
+
+- **glob string**: a string (e.g., `*.js`) which is matched using [minimatch](https://github.com/isaacs/minimatch), so go there for more
+    information. 
+
+    Globstars (`**`) are not supported since specifying a recursive pattern for an already recursive function doesn't make sense.
+
+    Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files.
+
+- **array of glob strings**: must be either all inclusive or all exclusive (negated) patterns; otherwise an error is thrown.
+    
+    `[ '*.json', '*.js' ]` includes all JavaScript and JSON files.
+    
+    `[ '!.git', '!node_modules' ]` includes all directories except '.git' and 'node_modules'.
+
+Directories that do not pass a filter will not be recursed into.
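+
+Since a function filter receives the full [entry info](#entry-info), it can also decide based on the `stat` object.
+A small sketch (the 1 kb threshold is arbitrary):
+
+```javascript
+var readdirp = require('readdirp');
+
+readdirp({
+    root: './test/bed'
+  , fileFilter: function (entry) { return entry.stat.size < 1024; } // only include small files
+})
+  .on('data', function (entry) { console.log(entry.path, entry.stat.size); });
+```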
+
+## Callback API
+
+Although the stream api is recommended, readdirp also exposes a callback-based api.
+
+***readdirp (options, callback1 [, callback2])***
+
+If callback2 is given, callback1 functions as the **fileProcessed** callback, and callback2 as the **allProcessed** callback.
+
+If only callback1 is given, it functions as the **allProcessed** callback.
+
+### allProcessed 
+
+- function with err and res parameters, e.g., `function (err, res) { ... }`
+- **err**: array of errors that occurred during the operation, **res may still be present, even if errors occurred**
+- **res**: collection of file/directory [entry infos](#entry-info)
+
+### fileProcessed
+
+- function with an [entry info](#entry-info) parameter, e.g., `function (entryInfo) { ... }`
+
+
+# More Examples
+
+`on('error', ..)`, `on('warn', ..)` and `on('end', ..)` handling omitted for brevity
+
+```javascript
+var readdirp = require('readdirp');
+
+// Glob file filter
+readdirp({ root: './test/bed', fileFilter: '*.js' })
+  .on('data', function (entry) {
+    // do something with each JavaScript file entry
+  });
+
+// Combined glob file filters
+readdirp({ root: './test/bed', fileFilter: [ '*.js', '*.json' ] })
+  .on('data', function (entry) {
+    // do something with each JavaScript and JSON file entry 
+  });
+
+// Combined negated directory filters
+readdirp({ root: './test/bed', directoryFilter: [ '!.git', '!*modules' ] })
+  .on('data', function (entry) {
+    // do something with each file entry found outside '.git' or any modules directory 
+  });
+
+// Function directory filter
+readdirp({ root: './test/bed', directoryFilter: function (di) { return di.name.length === 9; } })
+  .on('data', function (entry) {
+    // do something with each file entry found inside directories whose name has length 9
+  });
+
+// Limiting depth
+readdirp({ root: './test/bed', depth: 1 })
+  .on('data', function (entry) {
+    // do something with each file entry found up to 1 subdirectory deep
+  });
+
+// callback api
+readdirp(
+    { root: '.' }
+  , function(fileInfo) { 
+      // do something with file entry here
+    } 
+  , function (err, res) {
+      // all done, move on or do final step for all file entries here
+    }
+);
+```
+
+Try more examples by following the [instructions](https://github.com/thlorenz/readdirp/blob/master/examples/Readme.md)
+on how to get going.
+
+## stream api
+
+[stream-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api.js)
+
+Demonstrates error and data handling by listening to events emitted from the readdirp stream.
+
+## stream api pipe
+
+[stream-api-pipe.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api-pipe.js)
+
+Demonstrates error handling by listening to events emitted from the readdirp stream and how to pipe the data stream into
+another destination stream.
+
+## grep
+
+[grep.js](https://github.com/thlorenz/readdirp/blob/master/examples/grep.js)
+
+Very naive implementation of grep, for demonstration purposes only.
+
+## using callback api
+
+[callback-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/callback-api.js)
+
+Shows how to pass callbacks in order to handle errors and/or data.
+
+## tests
+
+The [readdirp tests](https://github.com/thlorenz/readdirp/blob/master/test/readdirp.js) will also give you a good idea of
+how things work.
+
diff --git a/examples/Readme.md b/examples/Readme.md
new file mode 100644
index 0000000..55fc461
--- /dev/null
+++ b/examples/Readme.md
@@ -0,0 +1,37 @@
+# readdirp examples
+
+## How to run the examples
+
+Assuming you installed readdirp (`npm install readdirp`), you can do the following:
+
+1. `npm explore readdirp`
+2. `cd examples`
+3. `npm install`
+
+At that point you can run the examples with node, e.g., `node grep`.
+
+## stream api
+
+[stream-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api.js)
+
+Demonstrates error and data handling by listening to events emitted from the readdirp stream.
+
+## stream api pipe
+
+[stream-api-pipe.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api-pipe.js)
+
+Demonstrates error handling by listening to events emitted from the readdirp stream and how to pipe the data stream into
+another destination stream.
+
+## grep
+
+[grep.js](https://github.com/thlorenz/readdirp/blob/master/examples/grep.js)
+
+Very naive implementation of grep, for demonstration purposes only.
+
+## using callback api
+
+[callback-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/callback-api.js)
+
+Shows how to pass callbacks in order to handle errors and/or data.
+
diff --git a/examples/callback-api.js b/examples/callback-api.js
new file mode 100644
index 0000000..39bd2d7
--- /dev/null
+++ b/examples/callback-api.js
@@ -0,0 +1,10 @@
+var readdirp = require('..'); 
+
+readdirp({ root: '.', fileFilter: '*.js' }, function (errors, res) {
+  if (errors) {
+    errors.forEach(function (err) {
+      console.error('Error: ', err);
+    });
+  }
+  console.log('all javascript files', res);
+});
diff --git a/examples/grep.js b/examples/grep.js
new file mode 100644
index 0000000..807fa35
--- /dev/null
+++ b/examples/grep.js
@@ -0,0 +1,73 @@
+'use strict';
+var readdirp =  require('..')
+  , util     =  require('util')
+  , fs       =  require('fs')
+  , path     =  require('path')
+  , Stream   =  require('stream')
+  , tap      =  require('tap-stream')
+  , es       =  require('event-stream')
+  ;
+
+function findLinesMatching (searchTerm) {
+
+  return es.through(function (entry) {
+    var lineno = 0
+      , matchingLines = []
+      , fileStream = this;
+
+    function filter () {
+      return es.mapSync(function (line) {
+        lineno++;
+        return ~line.indexOf(searchTerm) ? lineno + ': ' + line : undefined;
+      });
+    }
+
+    function aggregate () {
+      return es.through(
+          function write (data) { 
+            matchingLines.push(data); 
+          }
+        , function end () {
+
+            // drop files that had no matches
+            if (matchingLines.length) {
+              var result = { file: entry, lines: matchingLines };
+
+              // pass result on to file stream
+              fileStream.emit('data', result);
+            }
+            this.emit('end');
+          }
+      );
+    }
+
+    fs.createReadStream(entry.fullPath, { encoding: 'utf-8' })
+
+      // handle file contents line by line
+      .pipe(es.split('\n'))
+
+      // keep only the lines that matched the term
+      .pipe(filter())
+
+      // aggregate all matching lines and delegate control back to the file stream
+      .pipe(aggregate())
+      ;
+  });
+}
+
+console.log('grepping for "arguments"');
+
+// create a stream of all JavaScript files found in this and all subdirectories
+readdirp({ root: path.join(__dirname), fileFilter: '*.js' })
+
+  // find all lines matching the term for each file (if none found, that file is ignored)
+  .pipe(findLinesMatching('arguments'))
+
+  // format the results and output
+  .pipe(
+    es.mapSync(function (res) {
+      return '\n\n' + res.file.path + '\n\t' + res.lines.join('\n\t');
+    })
+  )
+  .pipe(process.stdout)
+  ;
diff --git a/examples/package.json b/examples/package.json
new file mode 100644
index 0000000..2d9b341
--- /dev/null
+++ b/examples/package.json
@@ -0,0 +1,9 @@
+{
+  "name": "readdirp-examples",
+  "version": "0.0.0",
+  "description": "Examples for readdirp.",
+  "dependencies": {
+    "tap-stream": "~0.1.0",
+    "event-stream": "~3.0.7"
+  }
+}
diff --git a/examples/stream-api-pipe.js b/examples/stream-api-pipe.js
new file mode 100644
index 0000000..b09fe59
--- /dev/null
+++ b/examples/stream-api-pipe.js
@@ -0,0 +1,13 @@
+var readdirp =  require('..')
+  , path = require('path')
+  , es = require('event-stream');
+
+// print out all JavaScript files along with their size
+readdirp({ root: path.join(__dirname), fileFilter: '*.js' })
+  .on('warn', function (err) { console.error('non-fatal error', err); })
+  .on('error', function (err) { console.error('fatal error', err); })
+  .pipe(es.mapSync(function (entry) { 
+    return { path: entry.path, size: entry.stat.size };
+  }))
+  .pipe(es.stringify())
+  .pipe(process.stdout);
diff --git a/examples/stream-api.js b/examples/stream-api.js
new file mode 100644
index 0000000..0f7b327
--- /dev/null
+++ b/examples/stream-api.js
@@ -0,0 +1,15 @@
+var readdirp =  require('..')
+  , path = require('path');
+
+readdirp({ root: path.join(__dirname), fileFilter: '*.js' })
+  .on('warn', function (err) { 
+    console.error('something went wrong when processing an entry', err); 
+  })
+  .on('error', function (err) { 
+    console.error('something went fatally wrong and the stream was aborted', err); 
+  })
+  .on('data', function (entry) { 
+    console.log('%s is ready for processing', entry.path);
+    // process entry here
+  });
+
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..8eeb958
--- /dev/null
+++ b/package.json
@@ -0,0 +1,38 @@
+{
+  "author": "Thorsten Lorenz <thlorenz at gmx.de> (thlorenz.com)",
+  "name": "readdirp",
+  "description": "Recursive version of fs.readdir with streaming api.",
+  "version": "0.2.4",
+  "homepage": "https://github.com/thlorenz/readdirp",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/thlorenz/readdirp.git"
+  },
+  "engines": {
+    "node": ">=0.4"
+  },
+  "keywords": [
+    "recursive",
+    "fs",
+    "stream",
+    "streams",
+    "readdir",
+    "filesystem",
+    "find",
+    "filter"
+  ],
+  "main": "readdirp.js",
+  "scripts": {
+    "test": "tap test/*.js"
+  },
+  "dependencies": {
+    "minimatch": ">=0.2.4"
+  },
+  "devDependencies": {
+    "tap": "~0.3.1",
+    "through": "~1.1.0",
+    "minimatch": "~0.2.7"
+  },
+  "optionalDependencies": {},
+  "license": "MIT"
+}
diff --git a/readdirp.js b/readdirp.js
new file mode 100644
index 0000000..03bfcd7
--- /dev/null
+++ b/readdirp.js
@@ -0,0 +1,267 @@
+'use strict';
+
+var fs        =  require('fs')
+  , path      =  require('path')
+  , minimatch =  require('minimatch')
+  , toString  =  Object.prototype.toString
+  ;
+
+// Standard helpers
+function isFunction (obj) {
+  return toString.call(obj) == '[object Function]';
+}
+
+function isString (obj) {
+  return toString.call(obj) == '[object String]';
+}
+
+function isRegExp (obj) {
+  return toString.call(obj) == '[object RegExp]';
+}
+
+function isUndefined (obj) {
+  return obj === void 0;
+}
+
+/** 
+ * Main function which ends up calling readdirRec and reads all files and directories in given root recursively.
+ * @param { Object }   opts     Options to specify root (start directory), filters and recursion depth
+ * @param { function } callback1  When callback2 is given, calls back for each processed file - function (fileInfo) { ... };
+ *                                when callback2 is not given, it behaves as described for callback2
+ * @param { function } callback2  Calls back once all files have been processed with an array of errors and file infos
+ *                                function (err, fileInfos) { ... }
+ */
+function readdir(opts, callback1, callback2) {
+  var stream
+    , handleError
+    , handleFatalError
+    , pending = 0
+    , errors = []
+    , readdirResult = {
+        directories: []
+      , files: []
+    }
+    , fileProcessed
+    , allProcessed
+    , realRoot
+    , aborted = false
+    ;
+
+  // If no callbacks were given we will use a streaming interface
+  if (isUndefined(callback1)) {
+    var api          =  require('./stream-api')();
+    stream           =  api.stream;
+    callback1        =  api.processEntry;
+    callback2        =  api.done;
+    handleError      =  api.handleError;
+    handleFatalError =  api.handleFatalError;
+
+    stream.on('close', function () { aborted = true; });
+  } else {
+    handleError      =  function (err) { errors.push(err); };
+    handleFatalError =  function (err) {
+      handleError(err);
+      allProcessed(errors, null);
+    };
+  }
+
+  if (isUndefined(opts)){
+    handleFatalError(new Error (
+      'Need to pass at least one argument: opts! \n' +
+      'https://github.com/thlorenz/readdirp#options'
+      )
+    );
+    return stream;
+  }
+
+  opts.root            =  opts.root            || '.';
+  opts.fileFilter      =  opts.fileFilter      || function() { return true; };
+  opts.directoryFilter =  opts.directoryFilter || function() { return true; };
+  opts.depth           =  typeof opts.depth === 'undefined' ? 999999999 : opts.depth;
+
+  if (isUndefined(callback2)) {
+    fileProcessed = function() { };
+    allProcessed = callback1;
+  } else {
+    fileProcessed = callback1;
+    allProcessed = callback2;
+  }
+
+  function normalizeFilter (filter) {
+
+    if (isUndefined(filter)) return undefined;
+
+    function isNegated (filters) {
+
+      function negated(f) { 
+        return f.indexOf('!') === 0; 
+      }
+
+      var some = filters.some(negated);
+      if (!some) {
+        return false;
+      } else {
+        if (filters.every(negated)) {
+          return true;
+        } else {
+          // if we detect illegal filters, bail out immediately
+          throw new Error(
+            'Cannot mix negated with non negated glob filters: ' + filters + '\n' +
+            'https://github.com/thlorenz/readdirp#filters'
+          );
+        }
+      }
+    }
+
+    // Turn all filters into a function
+    if (isFunction(filter)) {
+
+      return filter;
+
+    } else if (isString(filter)) {
+
+      return function (entryInfo) {
+        return minimatch(entryInfo.name, filter.trim());
+      };
+
+    } else if (filter && Array.isArray(filter)) {
+
+      if (filter) filter = filter.map(function (f) {
+        return f.trim();
+      });
+
+      return isNegated(filter) ?
+        // use AND to concat multiple negated filters
+        function (entryInfo) {
+          return filter.every(function (f) {
+            return minimatch(entryInfo.name, f);
+          });
+        }
+        :
+        // use OR to concat multiple inclusive filters
+        function (entryInfo) {
+          return filter.some(function (f) {
+            return minimatch(entryInfo.name, f);
+          });
+        };
+    }
+  }
+
+  function processDir(currentDir, entries, callProcessed) {
+    if (aborted) return;
+    var total = entries.length
+      , processed = 0
+      , entryInfos = []
+      ;
+
+    fs.realpath(currentDir, function(err, realCurrentDir) {
+      if (aborted) return;
+
+      var relDir = path.relative(realRoot, realCurrentDir);
+
+      if (entries.length === 0) {
+        callProcessed([]);
+      } else {
+        entries.forEach(function (entry) { 
+
+          var fullPath = path.join(realCurrentDir, entry),
+            relPath  = path.join(relDir, entry);
+
+          fs.stat(fullPath, function (err, stat) {
+            if (err) {
+              handleError(err);
+            } else {
+              entryInfos.push({
+                  name          :  entry
+                , path          :  relPath   // relative to root
+                , fullPath      :  fullPath
+
+                , parentDir     :  relDir    // relative to root
+                , fullParentDir :  realCurrentDir
+
+                , stat          :  stat
+              });
+            }
+            processed++;
+            if (processed === total) callProcessed(entryInfos);
+          });
+        });
+      }
+    });
+  }
+
+  function readdirRec(currentDir, depth, callCurrentDirProcessed) {
+    if (aborted) return;
+
+    fs.readdir(currentDir, function (err, entries) {
+      if (err) {
+        handleError(err);
+        callCurrentDirProcessed();
+        return;
+      }
+
+      processDir(currentDir, entries, function(entryInfos) {
+
+        var subdirs = entryInfos
+          .filter(function (ei) { return ei.stat.isDirectory() && opts.directoryFilter(ei); });
+
+        subdirs.forEach(function (di) { 
+          readdirResult.directories.push(di); 
+        });
+
+        entryInfos
+          .filter(function(ei) { return ei.stat.isFile() && opts.fileFilter(ei); })
+          .forEach(function (fi) { 
+            fileProcessed(fi);
+            readdirResult.files.push(fi); 
+          });
+
+        var pendingSubdirs = subdirs.length;
+
+        // Be done if no more subfolders exist or we reached the maximum desired depth
+        if(pendingSubdirs === 0 || depth === opts.depth) {
+          callCurrentDirProcessed();
+        } else {
+          // recurse into subdirs, keeping track of which ones are done 
+          // and call back once all are processed
+          subdirs.forEach(function (subdir) {
+            readdirRec(subdir.fullPath, depth + 1, function () {
+              pendingSubdirs = pendingSubdirs - 1;
+              if(pendingSubdirs === 0) { 
+                callCurrentDirProcessed();
+              }
+            });
+          });
+        }
+      });
+    });
+  }
+
+  // Validate and normalize filters
+  try {
+    opts.fileFilter = normalizeFilter(opts.fileFilter);
+    opts.directoryFilter = normalizeFilter(opts.directoryFilter);
+  } catch (err) {
+    // if we detect illegal filters, bail out immediately
+    handleFatalError(err);
+    return stream;
+  }
+
+  // If filters were valid get on with the show
+  fs.realpath(opts.root, function(err, res) {
+    
+    realRoot = res;
+    readdirRec(opts.root, 0, function () { 
+      // All errors are collected into the errors array
+      if (errors.length > 0) {
+        allProcessed(errors, readdirResult); 
+      } else {
+        allProcessed(null, readdirResult);
+      }
+    });
+  });
+
+  return stream;
+}
+
+module.exports = readdir;
diff --git a/stream-api.js b/stream-api.js
new file mode 100644
index 0000000..1cfc616
--- /dev/null
+++ b/stream-api.js
@@ -0,0 +1,86 @@
+var Stream = require('stream');
+
+function createStreamAPI () {
+  var stream
+    , processEntry
+    , done
+    , handleError
+    , handleFatalError
+    , paused = true
+    , controlled = false
+    , buffer = []
+    , closed = false
+    ;
+
+  stream = new Stream();
+  stream.writable = false;
+  stream.readable = true;
+
+  stream.pause = function () {
+    controlled = true;
+    paused = true;
+  };
+
+  stream.resume = function () {
+    controlled = true;
+    paused = false;
+    
+    // emit all buffered entries, errors and ends
+    while (!paused && buffer.length) {
+      var msg = buffer.shift();
+      this.emit(msg.type, msg.data);
+    }
+  };
+
+  stream.destroy = function () {
+    closed = true;
+    stream.readable = false;
+    stream.emit('close');
+  };
+
+  // called for each entry
+  processEntry = function (entry) {
+    if (closed) return;
+    return paused ? buffer.push({ type: 'data', data: entry }) : stream.emit('data', entry);
+  };
+
+  // called with all found entries when directory walk finished
+  done = function (err, entries) {
+    if (closed) return;
+    
+    // since we already emitted each entry and all non-fatal errors
+    // all we need to do here is to signal that we are done
+    stream.emit('end');
+  };
+
+  handleError = function (err) {
+    if (closed) return;
+    return paused ? buffer.push({ type: 'warn', data: err }) : stream.emit('warn', err);
+  };
+
+  handleFatalError = function (err) {
+    if (closed) return;
+    return paused ? buffer.push({ type: 'error', data: err }) : stream.emit('error', err);
+  };
+
+  // Allow stream to be returned and handlers to be attached and/or stream to be piped before emitting messages
+  // Otherwise we may lose data/errors that are emitted immediately
+  process.nextTick(function () { 
+    if (closed) return;
+    
+    // In case the stream was controlled (paused/resumed) manually, we don't interfere
+    // see https://github.com/thlorenz/readdirp/commit/ab7ff8561d73fca82c2ce7eb4ce9f7f5caf48b55#commitcomment-1964530
+    if (controlled) return;
+    stream.resume(); 
+  });
+
+  return { 
+      stream           :  stream
+    , processEntry     :  processEntry
+    , done             :  done
+    , handleError      :  handleError
+    , handleFatalError :  handleFatalError
+  };
+}
+
+module.exports = createStreamAPI;
diff --git a/test/bed/root_dir1/root_dir1_file1.ext1 b/test/bed/root_dir1/root_dir1_file1.ext1
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_dir1/root_dir1_file2.ext2 b/test/bed/root_dir1/root_dir1_file2.ext2
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_dir1/root_dir1_file3.ext3 b/test/bed/root_dir1/root_dir1_file3.ext3
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_dir1/root_dir1_subdir1/root1_dir1_subdir1_file1.ext1 b/test/bed/root_dir1/root_dir1_subdir1/root1_dir1_subdir1_file1.ext1
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_dir1/root_dir1_subdir2/.gitignore b/test/bed/root_dir1/root_dir1_subdir2/.gitignore
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/test/bed/root_dir1/root_dir1_subdir2/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/test/bed/root_dir2/root_dir2_file1.ext1 b/test/bed/root_dir2/root_dir2_file1.ext1
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_dir2/root_dir2_file2.ext2 b/test/bed/root_dir2/root_dir2_file2.ext2
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_dir2/root_dir2_subdir1/.gitignore b/test/bed/root_dir2/root_dir2_subdir1/.gitignore
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/test/bed/root_dir2/root_dir2_subdir1/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/test/bed/root_dir2/root_dir2_subdir2/.gitignore b/test/bed/root_dir2/root_dir2_subdir2/.gitignore
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/test/bed/root_dir2/root_dir2_subdir2/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/test/bed/root_file1.ext1 b/test/bed/root_file1.ext1
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_file2.ext2 b/test/bed/root_file2.ext2
new file mode 100644
index 0000000..e69de29
diff --git a/test/bed/root_file3.ext3 b/test/bed/root_file3.ext3
new file mode 100644
index 0000000..e69de29
diff --git a/test/readdirp-stream.js b/test/readdirp-stream.js
new file mode 100644
index 0000000..261c5f6
--- /dev/null
+++ b/test/readdirp-stream.js
@@ -0,0 +1,215 @@
+/*jshint asi:true */
+
+var test      =  require('tap').test
+  , path      =  require('path')
+  , fs        =  require('fs')
+  , util      =  require('util')
+  , Stream    =  require('stream')
+  , through   =  require('through')
+  , streamapi =  require('../stream-api')
+  , readdirp  =  require('..')
+  , root       =  path.join(__dirname, 'bed')
+  , totalDirs  =  6
+  , totalFiles =  12
+  , ext1Files  =  4
+  , ext2Files  =  3
+  , ext3Files  =  2
+  ;
+  
+// see test/readdirp.js for test bed layout
+
+function opts (extend) {
+  var o = { root: root };
+
+  if (extend) {
+    for (var prop in extend) {
+      o[prop] = extend[prop];
+    }
+  }
+  return o;
+}
+
+function capture () {
+  var result = { entries: [], errors: [], ended: false }
+    , dst = new Stream();
+
+  dst.writable = true;
+  dst.readable = true;
+
+  dst.write = function (entry) {
+    result.entries.push(entry);
+  }
+
+  dst.end = function () {
+    result.ended = true;
+    dst.emit('data', result);
+    dst.emit('end');
+  }
+
+  return dst;
+}
+
+test('\nintegrated', function (t) {
+  t.test('\n# reading root without filter', function (t) {
+    t.plan(2);
+    readdirp(opts())
+      .on('error', function (err) {
+        t.fail('should not throw error', err);
+      })
+      .pipe(capture())
+      .pipe(through(
+        function (result) { 
+          t.equals(result.entries.length, totalFiles, 'emits all files');
+          t.ok(result.ended, 'ends stream');
+          t.end();
+        }
+      ));
+  })
+
+  t.test('\n# normal: ["*.ext1", "*.ext3"]', function (t) {
+    t.plan(2);
+
+    readdirp(opts( { fileFilter: [ '*.ext1', '*.ext3' ] } ))
+      .on('error', function (err) {
+        t.fail('should not throw error', err);
+      })
+      .pipe(capture())
+      .pipe(through(
+        function (result) { 
+          t.equals(result.entries.length, ext1Files + ext3Files, 'all ext1 and ext3 files');
+          t.ok(result.ended, 'ends stream');
+          t.end();
+        }
+      ))
+  })
+
+  t.test('\n# negated: ["!*.ext1", "!*.ext3"]', function (t) {
+    t.plan(2);
+
+    readdirp(opts( { fileFilter: [ '!*.ext1', '!*.ext3' ] } ))
+      .on('error', function (err) {
+        t.fail('should not throw error', err);
+      })
+      .pipe(capture())
+      .pipe(through(
+        function (result) { 
+          t.equals(result.entries.length, totalFiles - ext1Files - ext3Files, 'all but ext1 and ext3 files');
+          t.ok(result.ended, 'ends stream');
+          t.end();
+        }
+      ))
+  })
+
+  t.test('\n# no options given', function (t) {
+    t.plan(1);
+    readdirp()
+      .on('error', function (err) {
+        t.similar(err.toString() , /Need to pass at least one argument/ , 'emits meaningful error');
+        t.end();
+      })
+  })
+
+  t.test('\n# mixed: ["*.ext1", "!*.ext3"]', function (t) {
+    t.plan(1);
+
+    readdirp(opts( { fileFilter: [ '*.ext1', '!*.ext3' ] } ))
+      .on('error', function (err) {
+        t.similar(err.toString() , /Cannot mix negated with non negated glob filters/ , 'emits meaningful error');
+        t.end();
+      })
+  })
+})
+
+
+test('\napi separately', function (t) {
+
+  t.test('\n# handleError', function (t) {
+    t.plan(1);
+
+    var api = streamapi()
+      , warning = new Error('some file caused problems');
+
+    api.stream
+      .on('warn', function (err) {
+        t.equals(err, warning, 'warns with the handled error');
+      })
+    api.handleError(warning);
+  })
+
+  t.test('\n# when stream is paused and then resumed', function (t) {
+    t.plan(6);
+    var api = streamapi()
+      , resumed = false
+      , fatalError = new Error('fatal!')
+      , nonfatalError = new Error('nonfatal!')
+      , processedData = 'some data'
+      ;
+
+    api.stream
+      .on('warn', function (err) {
+        t.equals(err, nonfatalError, 'emits the buffered warning');
+        t.ok(resumed, 'emits warning only after it was resumed');
+      })
+      .on('error', function (err) {
+        t.equals(err, fatalError, 'emits the buffered fatal error');
+        t.ok(resumed, 'emits errors only after it was resumed');
+      })
+      .on('data', function (data) {
+        t.equals(data, processedData, 'emits the buffered data');
+        t.ok(resumed, 'emits data only after it was resumed');
+      })
+      .pause()
+    
+    api.processEntry(processedData);
+    api.handleError(nonfatalError);
+    api.handleFatalError(fatalError);
+  
+    process.nextTick(function () {
+      resumed = true;
+      api.stream.resume();
+    })
+  })
+
+  t.test('\n# when a stream is destroyed, it emits "close", but no longer emits "data", "warn" and "error"', function (t) {
+    t.plan(6)
+    var api = streamapi()
+      , destroyed = false
+      , fatalError = new Error('fatal!')
+      , nonfatalError = new Error('nonfatal!')
+      , processedData = 'some data'
+
+    var stream = api.stream
+      .on('warn', function (err) {
+        t.notOk(destroyed, 'emits warning until destroyed');
+      })
+      .on('error', function (err) {
+        t.notOk(destroyed, 'emits errors until destroyed');
+      })
+      .on('data', function (data) {
+        t.notOk(destroyed, 'emits data until destroyed');
+      })
+      .on('close', function () {
+        t.ok(destroyed, 'emits close when stream is destroyed');
+      })
+    
+
+    api.processEntry(processedData);
+    api.handleError(nonfatalError);
+    api.handleFatalError(fatalError);
+
+    process.nextTick(function () {
+      destroyed = true
+      stream.destroy()
+
+      t.notOk(stream.readable, 'stream is no longer readable after it is destroyed')
+
+      api.processEntry(processedData);
+      api.handleError(nonfatalError);
+      api.handleFatalError(fatalError);
+
+      process.nextTick(function () {
+        t.pass('emits no more data, warn or error events after it was destroyed')  
+      })
+    })
+  })
+})
diff --git a/test/readdirp.js b/test/readdirp.js
new file mode 100644
index 0000000..f3edb52
--- /dev/null
+++ b/test/readdirp.js
@@ -0,0 +1,252 @@
+/*jshint asi:true */
+
+var test     =  require('tap').test
+  , path     =  require('path')
+  , fs       =  require('fs')
+  , util     =  require('util')
+  , readdirp =  require('../readdirp.js')
+  , root     =  path.join(__dirname, '../test/bed')
+  , totalDirs          =  6
+  , totalFiles         =  12
+  , ext1Files          =  4
+  , ext2Files          =  3
+  , ext3Files          =  2
+  , rootDir2Files      =  2
+  , nameHasLength9Dirs =  2
+  , depth1Files        =  8
+  , depth0Files        =  3
+  ;
+
+/* 
+Structure of test bed:
+    .
+    ├── root_dir1
+    │   ├── root_dir1_file1.ext1
+    │   ├── root_dir1_file2.ext2
+    │   ├── root_dir1_file3.ext3
+    │   ├── root_dir1_subdir1
+    │   │   └── root1_dir1_subdir1_file1.ext1
+    │   └── root_dir1_subdir2
+    │       └── .gitignore
+    ├── root_dir2
+    │   ├── root_dir2_file1.ext1
+    │   ├── root_dir2_file2.ext2
+    │   ├── root_dir2_subdir1
+    │   │   └── .gitignore
+    │   └── root_dir2_subdir2
+    │       └── .gitignore
+    ├── root_file1.ext1
+    ├── root_file2.ext2
+    └── root_file3.ext3
+
+    6 directories, 12 files
+*/
+
+// console.log('\033[2J'); // clear console
+
+function opts (extend) {
+  var o = { root: root };
+
+  if (extend) {
+    for (var prop in extend) {
+      o[prop] = extend[prop];
+    }
+  }
+  return o;
+}
+
+test('\nreading root without filter', function (t) {
+  t.plan(2);
+  readdirp(opts(), function (err, res) {
+    t.equals(res.directories.length, totalDirs, 'all directories');
+    t.equals(res.files.length, totalFiles, 'all files');
+    t.end();
+  }) 
+})
+
+test('\nreading root using glob filter', function (t) {
+  // normal
+  t.test('\n# "*.ext1"', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: '*.ext1' } ), function (err, res) {
+      t.equals(res.files.length, ext1Files, 'all ext1 files');
+      t.end();
+    })
+  })
+  t.test('\n# ["*.ext1", "*.ext3"]', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: [ '*.ext1', '*.ext3' ] } ), function (err, res) {
+      t.equals(res.files.length, ext1Files + ext3Files, 'all ext1 and ext3 files');
+      t.end();
+    })
+  })
+  t.test('\n# "root_dir1"', function (t) {
+    t.plan(1);
+    readdirp(opts( { directoryFilter: 'root_dir1' }), function (err, res) {
+      t.equals(res.directories.length, 1, 'one directory');
+      t.end();
+    })
+  })
+  t.test('\n# ["root_dir1", "*dir1_subdir1"]', function (t) {
+    t.plan(1);
+    readdirp(opts( { directoryFilter: [ 'root_dir1', '*dir1_subdir1' ]}), function (err, res) {
+      t.equals(res.directories.length, 2, 'two directories');
+      t.end();
+    })
+  })
+
+  t.test('\n# negated: "!*.ext1"', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: '!*.ext1' } ), function (err, res) {
+      t.equals(res.files.length, totalFiles - ext1Files, 'all but ext1 files');
+      t.end();
+    })
+  })
+  t.test('\n# negated: ["!*.ext1", "!*.ext3"]', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: [ '!*.ext1', '!*.ext3' ] } ), function (err, res) {
+      t.equals(res.files.length, totalFiles - ext1Files - ext3Files, 'all but ext1 and ext3 files');
+      t.end();
+    })
+  })
+
+  t.test('\n# mixed: ["*.ext1", "!*.ext3"]', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: [ '*.ext1', '!*.ext3' ] } ), function (err, res) {
+      t.similar(err[0].toString(), /Cannot mix negated with non negated glob filters/, 'returns meaningful error');
+      t.end();
+    })
+  })
+
+  t.test('\n# leading and trailing spaces: [" *.ext1", "*.ext3 "]', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: [ ' *.ext1', '*.ext3 ' ] } ), function (err, res) {
+      t.equals(res.files.length, ext1Files + ext3Files, 'all ext1 and ext3 files');
+      t.end();
+    })
+  })
+  t.test('\n# leading and trailing spaces: [" !*.ext1", " !*.ext3 "]', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: [ ' !*.ext1', ' !*.ext3' ] } ), function (err, res) {
+      t.equals(res.files.length, totalFiles - ext1Files - ext3Files, 'all but ext1 and ext3 files');
+      t.end();
+    })
+  })
+
+  t.test('\n# ** glob pattern', function (t) {
+    t.plan(1);
+    readdirp(opts( { fileFilter: '**/*.ext1' } ), function (err, res) {
+      t.equals(res.files.length, ext1Files, 'ignores ** in **/*.ext1 -> only *.ext1 files');
+      t.end();
+    })
+  })
+})
+
+test('\n\nreading root using function filter', function (t) {
+  t.test('\n# file filter -> "contains root_dir2"', function (t) {
+    t.plan(1);
+    readdirp(
+        opts( { fileFilter: function (fi) { return fi.name.indexOf('root_dir2') >= 0; } })
+      , function (err, res) {
+          t.equals(res.files.length, rootDir2Files, 'all rootDir2Files');
+          t.end();
+      }
+    )
+  })
+  
+  t.test('\n# directory filter -> "name has length 9"', function (t) {
+    t.plan(1);
+    readdirp(
+        opts( { directoryFilter: function (di) { return di.name.length === 9; } })
+      , function (err, res) {
+          t.equals(res.directories.length, nameHasLength9Dirs, 'all dirs with name length 9');
+          t.end();
+      }
+    )
+  })
+})
+
+test('\nreading root specifying maximum depth', function (t) {
+  t.test('\n# depth 1', function (t) {
+    t.plan(1);
+      readdirp(opts( { depth: 1 } ), function (err, res) {
+        t.equals(res.files.length, depth1Files, 'does not return files at depth 2');
+      })
+  })
+})
+
+test('\nreading root with no recursion', function (t) {
+  t.test('\n# depth 0', function (t) {
+    t.plan(1);
+      readdirp(opts( { depth: 0 } ), function (err, res) {
+        t.equals(res.files.length, depth0Files, 'only returns files at depth 0');
+      })
+  })
+})
+
+test('\nprogress callbacks', function (t) {
+  t.plan(2);
+
+  var pluckName = function(fi) { return fi.name; }
+    , processedFiles = [];
+
+  readdirp(
+      opts() 
+    , function(fi) { 
+        processedFiles.push(fi);
+      } 
+    , function (err, res) {
+        t.equals(processedFiles.length, res.files.length, 'calls back for each file processed');
+        t.deepEquals(processedFiles.map(pluckName).sort(),res.files.map(pluckName).sort(), 'same file names');
+        t.end();
+      }
+  )
+})
+
+test('resolving of name, full and relative paths', function (t) {
+  var expected = {  
+        name          :  'root_dir1_file1.ext1'
+      , parentDirName :  'root_dir1'
+      , path          :  'root_dir1/root_dir1_file1.ext1'
+      , fullPath      :  'test/bed/root_dir1/root_dir1_file1.ext1'
+      }
+    , opts = [ 
+        { root: './bed'          ,  prefix: ''     }
+      , { root: './bed/'         ,  prefix: ''     }
+      , { root: 'bed'            ,  prefix: ''     }
+      , { root: 'bed/'           ,  prefix: ''     }
+      , { root: '../test/bed/'   ,  prefix: ''     }
+      , { root: '.'              ,  prefix: 'bed'  }
+    ]
+  t.plan(opts.length);
+  
+  opts.forEach(function (op) {
+    op.fileFilter = 'root_dir1_file1.ext1';
+
+    t.test('\n' + util.inspect(op), function (t) {
+      t.plan(4);
+
+      readdirp (op, function(err, res) {
+        t.equals(res.files[0].name, expected.name, 'correct name');
+        t.equals(res.files[0].path, path.join(op.prefix, expected.path), 'correct path');
+      })
+
+      fs.realpath(op.root, function(err, fullRoot) {
+        readdirp (op, function(err, res) {
+          t.equals(
+              res.files[0].fullParentDir
+            , path.join(fullRoot, op.prefix, expected.parentDirName)
+            , 'correct parentDir'
+          );
+          t.equals(
+              res.files[0].fullPath
+            , path.join(fullRoot, op.prefix, expected.parentDirName, expected.name)
+            , 'correct fullPath'
+          );
+        })
+      })
+    })
+  })
+})
+
+

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-readdirp.git


