[Pkg-javascript-commits] [node-socket.io-parser] 01/02: Imported Upstream version 2.2.4

Sebastiaan Couwenberg sebastic at moszumanska.debian.org
Sat Mar 28 23:13:14 UTC 2015


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository node-socket.io-parser.

commit 3e03324c605f51aece2115e4ce3e7a120f69b716
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Sat Mar 28 21:37:57 2015 +0100

    Imported Upstream version 2.2.4
---
 .gitignore          |   3 +
 .npmignore          |   5 +
 .travis.yml         |  11 ++
 .zuul.yml           |  16 +++
 History.md          |  97 +++++++++++++
 Makefile            |  11 ++
 Readme.md           |  73 ++++++++++
 bench/bench.js      |  10 ++
 bench/index.js      |  81 +++++++++++
 binary.js           | 141 ++++++++++++++++++
 index.js            | 400 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 is-buffer.js        |  13 ++
 package.json        |  25 ++++
 test/arraybuffer.js |  57 ++++++++
 test/blob.js        |  67 +++++++++
 test/buffer.js      |  25 ++++
 test/helpers.js     |  46 ++++++
 test/index.js       |  30 ++++
 test/parser.js      |  62 ++++++++
 test/support/env.js |   5 +
 20 files changed, 1178 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..bb66ab2
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+node_modules
+build
+components
diff --git a/.npmignore b/.npmignore
new file mode 100644
index 0000000..7fbb06f
--- /dev/null
+++ b/.npmignore
@@ -0,0 +1,5 @@
+node_modules
+build
+components
+
+test
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..c1c830a
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,11 @@
+language: node_js
+node_js:
+  - 0.10
+notifications:
+  irc: irc.freenode.org#socket.io
+git:
+  depth: 1
+env:
+  global:
+  - secure: 28HHk1J0H64KNjzmmlxG/Ro0EPuLnhO0rU+kZjjthDHj/FaugIsqjAVQ1Dl6heWV2/MlVzw6nDCyMNiGwiVk0ruZPQ0SYdAKLplrdMtzAihbU3xx+ULFQPLM3SoW0ZFCEpe8dWPGy4WdgW7aLByeel9TJb3vlhAu7p7AvrcO7Fs=
+  - secure: rKEG0Cfw0vkw8thk63RHYG7h8XWYcBlvZ0w1IWpr2dAfnnLMi1palSTrBrFQc77flk7rN00zGIe76FhKydV9r4WWYAUYKPqo4k+9/FkpzjZlNtL49QRoNwC9jmJ8OeUwGowA13gZPyl/5P13wVaKCB0YrKnzz5LHo3Sp7So6J8U=
diff --git a/.zuul.yml b/.zuul.yml
new file mode 100644
index 0000000..38bc342
--- /dev/null
+++ b/.zuul.yml
@@ -0,0 +1,16 @@
+ui: mocha-bdd
+browsers: 
+  - name: chrome
+    version: 29..latest
+  - name: firefox
+    version: latest
+  - name: safari
+    version: latest
+  - name: ie
+    version: 10
+    platform: Windows 2012
+  - name: ie
+    version: 9
+  - name: iphone
+    version: oldest..latest
diff --git a/History.md b/History.md
new file mode 100644
index 0000000..eaad1b9
--- /dev/null
+++ b/History.md
@@ -0,0 +1,97 @@
+
+2.2.4 / 2015-03-03
+==================
+
+ * index: fix off-by-one bound checks
+
+2.2.3 / 2015-02-03
+==================
+
+ * index: fix potential infinite loop with malicious binary packet
+
+2.2.2 / 2014-09-04
+==================
+
+ * prevent direct `Buffer` reference that breaks browserify
+ * binary: reuse `isBuf` helper
+
+2.2.1 / 2014-06-20
+==================
+
+ * added benchmarking [kevin-roark]
+ * upgrade component-emitter to 1.1.2 [kevin-roark]
+ * update protocol version [kevin-roark]
+ * less indentation and a small optimization [kevin-roark]
+
+2.2.0 / 2014-05-30
+==================
+
+ * added a BINARY_ACK type [kevin-roark]
+
+2.1.5 / 2014-05-24
+==================
+
+ * don't iterate keys of `Date` objects [Rase-]
+
+2.1.4 / 2014-05-17
+==================
+
+ * fix null reconstruction bug [kevin-roark]
+
+2.1.3 / 2014-04-27
+==================
+
+ * bump zuul version
+ * updated protocol version
+
+2.1.2 / 2014-03-06
+==================
+
+ * added support for binary in ACK packets
+
+2.1.1 / 2014-03-04
+==================
+
+ * removed has-binary-data dependency
+ * fixed the object check in binary.removeBlobs
+
+2.1.0 / 2014-03-01
+==================
+
+ * faster and smaller binary parser and protocol [kevin-roark]
+
+2.0.0 / 2014-02-19
+==================
+
+ * binary support [kevin-roark]
+
+1.1.2 / 2014-02-11
+==================
+
+ * package: bump `json3` to fix IE6-7
+
+1.1.1 / 2014-02-10
+==================
+
+ * package: bump debug to fix browserify issues
+
+1.1.0 / 2013-12-25
+==================
+
+ * index: use `json3`
+
+1.0.3 / 2012-12-18
+==================
+
+  * index: added instrumentation through `debug`
+  * index: make sure decoded `id` is a `Number`
+
+1.0.2 / 2012-12-18
+==================
+
+  * index: allow for falsy values in `id` and `data`
+
+1.0.1 / 2012-12-10
+==================
+
+  * Revision 1
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..ee0a3b2
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,11 @@
+
+REPORTER = dot
+
+test:
+	@./node_modules/.bin/mocha \
+		--reporter $(REPORTER) \
+		--bail \
+		test/index.js
+	@./node_modules/.bin/zuul -- test/index.js
+
+.PHONY: test
diff --git a/Readme.md b/Readme.md
new file mode 100644
index 0000000..2fa0f82
--- /dev/null
+++ b/Readme.md
@@ -0,0 +1,73 @@
+
+# socket.io-parser
+
+[![Build Status](https://secure.travis-ci.org/Automattic/socket.io-parser.svg)](http://travis-ci.org/Automattic/socket.io-parser)
+[![NPM version](https://badge.fury.io/js/socket.io-parser.svg)](http://badge.fury.io/js/socket.io-parser)
+
+A socket.io encoder and decoder written in JavaScript complying with version `3`
+of [socket.io-protocol](https://github.com/learnboost/socket.io-protocol).
+Used by [socket.io](https://github.com/learnboost/socket.io) and
+[socket.io-client](https://github.com/learnboost/socket.io-client).
+
+## Parser API
+
+  socket.io-parser is the reference implementation of socket.io-protocol. Read
+  the full API here:
+  [socket.io-protocol](https://github.com/learnboost/socket.io-protocol).
+
+## Example Usage
+
+### Encoding and decoding a packet
+
+```js
+var parser = require('socket.io-parser');
+var encoder = new parser.Encoder();
+var packet = {
+  type: parser.EVENT,
+  data: 'test-packet',
+  id: 13
+};
+encoder.encode(packet, function(encodedPackets) {
+  var decoder = new parser.Decoder();
+  decoder.on('decoded', function(decodedPacket) {
+    // decodedPacket.type == parser.EVENT
+    // decodedPacket.data == 'test-packet'
+    // decodedPacket.id == 13
+  });
+
+  for (var i = 0; i < encodedPackets.length; i++) {
+    decoder.add(encodedPackets[i]);
+  }
+});
+```
+
+### Encoding and decoding a packet with binary data
+
+```js
+var parser = require('socket.io-parser');
+var encoder = new parser.Encoder();
+var packet = {
+  type: parser.BINARY_EVENT,
+  data: {i: new Buffer(1234), j: new Blob([new ArrayBuffer(2)])},
+  id: 15
+};
+encoder.encode(packet, function(encodedPackets) {
+  var decoder = new parser.Decoder();
+  decoder.on('decoded', function(decodedPacket) {
+    // decodedPacket.type == parser.BINARY_EVENT
+    // Buffer.isBuffer(decodedPacket.data.i) == true
+    // Buffer.isBuffer(decodedPacket.data.j) == true
+    // decodedPacket.id == 15
+  });
+
+  for (var i = 0; i < encodedPackets.length; i++) {
+    decoder.add(encodedPackets[i]);
+  }
+});
+```
+See the test suite for more examples of how socket.io-parser is used.
+
+
+## License
+
+MIT
diff --git a/bench/bench.js b/bench/bench.js
new file mode 100644
index 0000000..d767d5a
--- /dev/null
+++ b/bench/bench.js
@@ -0,0 +1,10 @@
+var bencher = require('./index');
+bencher(function(benchmark) {
+  function logMean(test) {
+    console.log(test.name + ' mean run time: ' + test.stats.mean);
+  }
+
+  for (var i = 0; i < benchmark.length; i++) {
+    logMean(benchmark[i]);
+  }
+});
diff --git a/bench/index.js b/bench/index.js
new file mode 100644
index 0000000..8c1994e
--- /dev/null
+++ b/bench/index.js
@@ -0,0 +1,81 @@
+var Benchmark = require('benchmark');
+var parser = require('../index');
+
+function test(packet, deferred) {
+  var encoder = new parser.Encoder();
+  var decoder = new parser.Decoder();
+  encoder.encode(packet, function(encodedPackets) {
+    var decoder = new parser.Decoder();
+    decoder.on('decoded', function(packet) {
+      deferred.resolve();
+    });
+
+    decoder.add(encodedPackets[0]);
+  });
+}
+
+var dataObject = {
+  'a': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17],
+  'b': 'xxxyyyzzzalsdfalskdjfalksdjfalksdjfalksdjfjjfjfjfjjfjfjfj',
+  'data': {
+    'is': 'cool',
+    'or': {
+      'is': {
+        'it': true
+      }
+    }
+   }
+};
+var bigArray = [];
+for (var i = 0; i < 250; i++) {
+  bigArray.push(dataObject);
+}
+
+
+
+module.exports = function(callback) {
+  var suite = new Benchmark.Suite();
+
+  suite.add('small json parse', {defer: true, fn: function(deferred) {
+    var packet = {
+      type: parser.EVENT,
+      nsp: '/bench',
+      data: dataObject
+    };
+    test(packet, deferred);
+  }})
+  .add('big json parse', {defer: true, fn: function(deferred) {
+    var packet = {
+      type: parser.EVENT,
+      nsp: '/bench',
+      data: bigArray
+    };
+    test(packet, deferred);
+  }})
+  .add('json with small binary parse', {defer: true, fn: function(deferred) {
+    var packet = {
+      type: parser.EVENT,
+      nsp: '/bench',
+      data: {'a': [1, 2, 3], 'b': 'xxxyyyzzz', 'data': new Buffer(1000)}
+    };
+    test(packet, deferred);
+  }})
+  .add('json with big binary parse', {defer: true, fn: function(deferred) {
+    var bigBinaryData = {
+      bin1: new Buffer(10000),
+      arr: bigArray,
+      bin2: new Buffer(10000),
+      bin3: new Buffer(10000)
+    };
+    var packet = {
+      type: parser.EVENT,
+      nsp: '/bench',
+      data: bigBinaryData
+    };
+    test(packet, deferred);
+  }})
+  .on('complete', function() {
+    callback(this);
+  })
+  .run({'async': true});
+};
diff --git a/binary.js b/binary.js
new file mode 100644
index 0000000..b31f40c
--- /dev/null
+++ b/binary.js
@@ -0,0 +1,141 @@
+/*global Blob,File*/
+
+/**
+ * Module requirements
+ */
+
+var isArray = require('isarray');
+var isBuf = require('./is-buffer');
+
+/**
+ * Replaces every Buffer | ArrayBuffer in packet with a numbered placeholder.
+ * Anything with blobs or files should be fed through removeBlobs before coming
+ * here.
+ *
+ * @param {Object} packet - socket.io event packet
+ * @return {Object} with deconstructed packet and list of buffers
+ * @api public
+ */
+
+exports.deconstructPacket = function(packet){
+  var buffers = [];
+  var packetData = packet.data;
+
+  function _deconstructPacket(data) {
+    if (!data) return data;
+
+    if (isBuf(data)) {
+      var placeholder = { _placeholder: true, num: buffers.length };
+      buffers.push(data);
+      return placeholder;
+    } else if (isArray(data)) {
+      var newData = new Array(data.length);
+      for (var i = 0; i < data.length; i++) {
+        newData[i] = _deconstructPacket(data[i]);
+      }
+      return newData;
+    } else if ('object' == typeof data && !(data instanceof Date)) {
+      var newData = {};
+      for (var key in data) {
+        newData[key] = _deconstructPacket(data[key]);
+      }
+      return newData;
+    }
+    return data;
+  }
+
+  var pack = packet;
+  pack.data = _deconstructPacket(packetData);
+  pack.attachments = buffers.length; // number of binary 'attachments'
+  return {packet: pack, buffers: buffers};
+};
+
+/**
+ * Reconstructs a binary packet from its placeholder packet and buffers
+ *
+ * @param {Object} packet - event packet with placeholders
+ * @param {Array} buffers - binary buffers to put in placeholder positions
+ * @return {Object} reconstructed packet
+ * @api public
+ */
+
+exports.reconstructPacket = function(packet, buffers) {
+  var curPlaceHolder = 0;
+
+  function _reconstructPacket(data) {
+    if (data && data._placeholder) {
+      var buf = buffers[data.num]; // appropriate buffer (should be natural order anyway)
+      return buf;
+    } else if (isArray(data)) {
+      for (var i = 0; i < data.length; i++) {
+        data[i] = _reconstructPacket(data[i]);
+      }
+      return data;
+    } else if (data && 'object' == typeof data) {
+      for (var key in data) {
+        data[key] = _reconstructPacket(data[key]);
+      }
+      return data;
+    }
+    return data;
+  }
+
+  packet.data = _reconstructPacket(packet.data);
+  packet.attachments = undefined; // no longer useful
+  return packet;
+};
+
+/**
+ * Asynchronously removes Blobs or Files from data via
+ * FileReader's readAsArrayBuffer method. Used before encoding
+ * data as msgpack. Calls callback with the blobless data.
+ *
+ * @param {Object} data
+ * @param {Function} callback
+ * @api private
+ */
+
+exports.removeBlobs = function(data, callback) {
+  function _removeBlobs(obj, curKey, containingObject) {
+    if (!obj) return obj;
+
+    // convert any blob
+    if ((global.Blob && obj instanceof Blob) ||
+        (global.File && obj instanceof File)) {
+      pendingBlobs++;
+
+      // async filereader
+      var fileReader = new FileReader();
+      fileReader.onload = function() { // this.result == arraybuffer
+        if (containingObject) {
+          containingObject[curKey] = this.result;
+        }
+        else {
+          bloblessData = this.result;
+        }
+
+        // if nothing pending its callback time
+        if(! --pendingBlobs) {
+          callback(bloblessData);
+        }
+      };
+
+      fileReader.readAsArrayBuffer(obj); // blob -> arraybuffer
+    } else if (isArray(obj)) { // handle array
+      for (var i = 0; i < obj.length; i++) {
+        _removeBlobs(obj[i], i, obj);
+      }
+    } else if (obj && 'object' == typeof obj && !isBuf(obj)) { // and object
+      for (var key in obj) {
+        _removeBlobs(obj[key], key, obj);
+      }
+    }
+  }
+
+  var pendingBlobs = 0;
+  var bloblessData = data;
+  _removeBlobs(bloblessData);
+  if (!pendingBlobs) {
+    callback(bloblessData);
+  }
+};
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..4149558
--- /dev/null
+++ b/index.js
@@ -0,0 +1,400 @@
+
+/**
+ * Module dependencies.
+ */
+
+var debug = require('debug')('socket.io-parser');
+var json = require('json3');
+var isArray = require('isarray');
+var Emitter = require('component-emitter');
+var binary = require('./binary');
+var isBuf = require('./is-buffer');
+
+/**
+ * Protocol version.
+ *
+ * @api public
+ */
+
+exports.protocol = 4;
+
+/**
+ * Packet types.
+ *
+ * @api public
+ */
+
+exports.types = [
+  'CONNECT',
+  'DISCONNECT',
+  'EVENT',
+  'BINARY_EVENT',
+  'ACK',
+  'BINARY_ACK',
+  'ERROR'
+];
+
+/**
+ * Packet type `connect`.
+ *
+ * @api public
+ */
+
+exports.CONNECT = 0;
+
+/**
+ * Packet type `disconnect`.
+ *
+ * @api public
+ */
+
+exports.DISCONNECT = 1;
+
+/**
+ * Packet type `event`.
+ *
+ * @api public
+ */
+
+exports.EVENT = 2;
+
+/**
+ * Packet type `ack`.
+ *
+ * @api public
+ */
+
+exports.ACK = 3;
+
+/**
+ * Packet type `error`.
+ *
+ * @api public
+ */
+
+exports.ERROR = 4;
+
+/**
+ * Packet type 'binary event'
+ *
+ * @api public
+ */
+
+exports.BINARY_EVENT = 5;
+
+/**
+ * Packet type `binary ack`. For acks with binary arguments.
+ *
+ * @api public
+ */
+
+exports.BINARY_ACK = 6;
+
+/**
+ * Encoder constructor.
+ *
+ * @api public
+ */
+
+exports.Encoder = Encoder;
+
+/**
+ * Decoder constructor.
+ *
+ * @api public
+ */
+
+exports.Decoder = Decoder;
+
+/**
+ * A socket.io Encoder instance
+ *
+ * @api public
+ */
+
+function Encoder() {}
+
+/**
+ * Encode a packet as a single string if non-binary, or as a
+ * buffer sequence, depending on packet type.
+ *
+ * @param {Object} obj - packet object
+ * @param {Function} callback - function to handle encodings (likely engine.write)
+ * @return Calls callback with Array of encodings
+ * @api public
+ */
+
+Encoder.prototype.encode = function(obj, callback){
+  debug('encoding packet %j', obj);
+
+  if (exports.BINARY_EVENT == obj.type || exports.BINARY_ACK == obj.type) {
+    encodeAsBinary(obj, callback);
+  }
+  else {
+    var encoding = encodeAsString(obj);
+    callback([encoding]);
+  }
+};
+
+/**
+ * Encode packet as string.
+ *
+ * @param {Object} packet
+ * @return {String} encoded
+ * @api private
+ */
+
+function encodeAsString(obj) {
+  var str = '';
+  var nsp = false;
+
+  // first is type
+  str += obj.type;
+
+  // attachments if we have them
+  if (exports.BINARY_EVENT == obj.type || exports.BINARY_ACK == obj.type) {
+    str += obj.attachments;
+    str += '-';
+  }
+
+  // if we have a namespace other than `/`
+  // we append it followed by a comma `,`
+  if (obj.nsp && '/' != obj.nsp) {
+    nsp = true;
+    str += obj.nsp;
+  }
+
+  // immediately followed by the id
+  if (null != obj.id) {
+    if (nsp) {
+      str += ',';
+      nsp = false;
+    }
+    str += obj.id;
+  }
+
+  // json data
+  if (null != obj.data) {
+    if (nsp) str += ',';
+    str += json.stringify(obj.data);
+  }
+
+  debug('encoded %j as %s', obj, str);
+  return str;
+}
+
+/**
+ * Encode packet as 'buffer sequence' by removing blobs, and
+ * deconstructing packet into object with placeholders and
+ * a list of buffers.
+ *
+ * @param {Object} packet
+ * @return {Buffer} encoded
+ * @api private
+ */
+
+function encodeAsBinary(obj, callback) {
+
+  function writeEncoding(bloblessData) {
+    var deconstruction = binary.deconstructPacket(bloblessData);
+    var pack = encodeAsString(deconstruction.packet);
+    var buffers = deconstruction.buffers;
+
+    buffers.unshift(pack); // add packet info to beginning of data list
+    callback(buffers); // write all the buffers
+  }
+
+  binary.removeBlobs(obj, writeEncoding);
+}
+
+/**
+ * A socket.io Decoder instance
+ *
+ * @return {Object} decoder
+ * @api public
+ */
+
+function Decoder() {
+  this.reconstructor = null;
+}
+
+/**
+ * Mix in `Emitter` with Decoder.
+ */
+
+Emitter(Decoder.prototype);
+
+/**
+ * Decodes an encoded packet string into packet JSON.
+ *
+ * @param {String} obj - encoded packet
+ * @return {Object} packet
+ * @api public
+ */
+
+Decoder.prototype.add = function(obj) {
+  var packet;
+  if ('string' == typeof obj) {
+    packet = decodeString(obj);
+    if (exports.BINARY_EVENT == packet.type || exports.BINARY_ACK == packet.type) { // binary packet's json
+      this.reconstructor = new BinaryReconstructor(packet);
+
+      // no attachments, labeled binary but no binary data to follow
+      if (this.reconstructor.reconPack.attachments === 0) {
+        this.emit('decoded', packet);
+      }
+    } else { // non-binary full packet
+      this.emit('decoded', packet);
+    }
+  }
+  else if (isBuf(obj) || obj.base64) { // raw binary data
+    if (!this.reconstructor) {
+      throw new Error('got binary data when not reconstructing a packet');
+    } else {
+      packet = this.reconstructor.takeBinaryData(obj);
+      if (packet) { // received final buffer
+        this.reconstructor = null;
+        this.emit('decoded', packet);
+      }
+    }
+  }
+  else {
+    throw new Error('Unknown type: ' + obj);
+  }
+};
+
+/**
+ * Decode a packet String (JSON data)
+ *
+ * @param {String} str
+ * @return {Object} packet
+ * @api private
+ */
+
+function decodeString(str) {
+  var p = {};
+  var i = 0;
+
+  // look up type
+  p.type = Number(str.charAt(0));
+  if (null == exports.types[p.type]) return error();
+
+  // look up attachments if type binary
+  if (exports.BINARY_EVENT == p.type || exports.BINARY_ACK == p.type) {
+    var buf = '';
+    while (str.charAt(++i) != '-') {
+      buf += str.charAt(i);
+      if (i == str.length) break;
+    }
+    if (buf != Number(buf) || str.charAt(i) != '-') {
+      throw new Error('Illegal attachments');
+    }
+    p.attachments = Number(buf);
+  }
+
+  // look up namespace (if any)
+  if ('/' == str.charAt(i + 1)) {
+    p.nsp = '';
+    while (++i) {
+      var c = str.charAt(i);
+      if (',' == c) break;
+      p.nsp += c;
+      if (i == str.length) break;
+    }
+  } else {
+    p.nsp = '/';
+  }
+
+  // look up id
+  var next = str.charAt(i + 1);
+  if ('' !== next && Number(next) == next) {
+    p.id = '';
+    while (++i) {
+      var c = str.charAt(i);
+      if (null == c || Number(c) != c) {
+        --i;
+        break;
+      }
+      p.id += str.charAt(i);
+      if (i == str.length) break;
+    }
+    p.id = Number(p.id);
+  }
+
+  // look up json data
+  if (str.charAt(++i)) {
+    try {
+      p.data = json.parse(str.substr(i));
+    } catch(e){
+      return error();
+    }
+  }
+
+  debug('decoded %s as %j', str, p);
+  return p;
+}
+
+/**
+ * Deallocates a parser's resources
+ *
+ * @api public
+ */
+
+Decoder.prototype.destroy = function() {
+  if (this.reconstructor) {
+    this.reconstructor.finishedReconstruction();
+  }
+};
+
+/**
+ * A manager of a binary event's 'buffer sequence'. Should
+ * be constructed whenever a packet of type BINARY_EVENT is
+ * decoded.
+ *
+ * @param {Object} packet
+ * @return {BinaryReconstructor} initialized reconstructor
+ * @api private
+ */
+
+function BinaryReconstructor(packet) {
+  this.reconPack = packet;
+  this.buffers = [];
+}
+
+/**
+ * Method to be called when binary data received from connection
+ * after a BINARY_EVENT packet.
+ *
+ * @param {Buffer | ArrayBuffer} binData - the raw binary data received
+ * @return {null | Object} returns null if more binary data is expected or
+ *   a reconstructed packet object if all buffers have been received.
+ * @api private
+ */
+
+BinaryReconstructor.prototype.takeBinaryData = function(binData) {
+  this.buffers.push(binData);
+  if (this.buffers.length == this.reconPack.attachments) { // done with buffer list
+    var packet = binary.reconstructPacket(this.reconPack, this.buffers);
+    this.finishedReconstruction();
+    return packet;
+  }
+  return null;
+};
+
+/**
+ * Cleans up binary packet reconstruction variables.
+ *
+ * @api private
+ */
+
+BinaryReconstructor.prototype.finishedReconstruction = function() {
+  this.reconPack = null;
+  this.buffers = [];
+};
+
+function error(data){
+  return {
+    type: exports.ERROR,
+    data: 'parser error'
+  };
+}
diff --git a/is-buffer.js b/is-buffer.js
new file mode 100644
index 0000000..977df88
--- /dev/null
+++ b/is-buffer.js
@@ -0,0 +1,13 @@
+
+module.exports = isBuf;
+
+/**
+ * Returns true if obj is a buffer or an arraybuffer.
+ *
+ * @api private
+ */
+
+function isBuf(obj) {
+  return (global.Buffer && global.Buffer.isBuffer(obj)) ||
+         (global.ArrayBuffer && obj instanceof ArrayBuffer);
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..b92c925
--- /dev/null
+++ b/package.json
@@ -0,0 +1,25 @@
+{
+  "name": "socket.io-parser",
+  "version": "2.2.4",
+  "description": "socket.io protocol parser",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Automattic/socket.io-parser.git"
+  },
+  "dependencies": {
+    "debug": "0.7.4",
+    "json3": "3.2.6",
+    "component-emitter": "1.1.2",
+    "isarray": "0.0.1",
+    "benchmark": "1.0.0"
+  },
+  "devDependencies": {
+    "mocha": "1.16.2",
+    "expect.js": "0.2.0",
+    "zuul": "1.6.3"
+  },
+  "scripts": {
+    "test": "make test"
+  },
+  "license": "MIT"
+}
diff --git a/test/arraybuffer.js b/test/arraybuffer.js
new file mode 100644
index 0000000..0ba4c04
--- /dev/null
+++ b/test/arraybuffer.js
@@ -0,0 +1,57 @@
+var parser = require('../index.js');
+var expect = require('expect.js');
+var helpers = require('./helpers.js');
+var encoder = new parser.Encoder();
+
+describe('parser', function() {
+  it('encodes an ArrayBuffer', function() {
+    var packet = {
+      type: parser.BINARY_EVENT,
+      data: new ArrayBuffer(2),
+      id: 0,
+      nsp: '/'
+    };
+    helpers.test_bin(packet);
+  });
+
+  it('encodes ArrayBuffers deep in JSON', function() {
+    var packet = {
+      type: parser.BINARY_EVENT,
+      data: {a: 'hi', b: {why: new ArrayBuffer(3)}, c: {a: 'bye', b: { a: new ArrayBuffer(6)}}},
+      id: 999,
+      nsp: '/deep'
+    };
+    helpers.test_bin(packet);
+  });
+
+  it('encodes deep binary JSON with null values', function() {
+    var packet = {
+      type: parser.BINARY_EVENT,
+      data: {a: 'b', c: 4, e: {g: null}, h: new ArrayBuffer(9)},
+      nsp: '/',
+      id: 600
+    };
+    helpers.test_bin(packet);
+  });
+
+  it('cleans itself up on close', function() {
+    var packet = {
+      type: parser.BINARY_EVENT,
+      data: [new ArrayBuffer(2), new ArrayBuffer(3)],
+      id: 0,
+      nsp: '/'
+    };
+
+    encoder.encode(packet, function(encodedPackets) {
+      var decoder = new parser.Decoder();
+      decoder.on('decoded', function(packet) {
+        throw new Error("received a packet when not all binary data was sent.");
+      });
+
+      decoder.add(encodedPackets[0]); // add metadata
+      decoder.add(encodedPackets[1]); // add first attachment
+      decoder.destroy(); // destroy before all data added
+      expect(decoder.reconstructor.buffers.length).to.be(0); // expect that buffer is clean
+    });
+  });
+});
diff --git a/test/blob.js b/test/blob.js
new file mode 100644
index 0000000..31d4b24
--- /dev/null
+++ b/test/blob.js
@@ -0,0 +1,67 @@
+var parser = require('../index.js');
+var expect = require('expect.js');
+var helpers = require('./helpers.js');
+var encode = parser.encode;
+var decode = parser.decode;
+
+var BlobBuilder = global.BlobBuilder || global.WebKitBlobBuilder || global.MSBlobBuilder || global.MozBlobBuilder;
+
+describe('parser', function() {
+  it('encodes a Blob', function() {
+    var data;
+    if (BlobBuilder) {
+      var bb = new BlobBuilder();
+      bb.append(new ArrayBuffer(2));
+      data = bb.getBlob();
+    } else {
+      data = new Blob([new ArrayBuffer(2)]);
+    }
+
+    var packet = {
+      type: parser.BINARY_EVENT,
+      data: data,
+      id: 0,
+      nsp: '/'
+    };
+    helpers.test_bin(packet);
+  });
+
+  it('encodes a Blob deep in JSON', function() {
+    var data;
+    if (BlobBuilder) {
+      var bb = new BlobBuilder();
+      bb.append(new ArrayBuffer(2));
+      data = bb.getBlob();
+    } else {
+      data = new Blob([new ArrayBuffer(2)]);
+    }
+
+    var packet = {
+      type: parser.BINARY_EVENT,
+      data: {a: 'hi', b: { why: data }, c: 'bye'},
+      id: 999,
+      nsp: '/deep'
+    };
+    helpers.test_bin(packet);
+  });
+
+  it('encodes a binary ack with a blob', function() {
+    var data;
+    if (BlobBuilder) {
+      var bb = new BlobBuilder();
+      bb.append(new ArrayBuffer(2));
+      data = bb.getBlob();
+    } else {
+      data = new Blob([new ArrayBuffer(2)]);
+    }
+
+    var packet = {
+      type: parser.BINARY_ACK,
+      data: {a: 'hi ack', b: { why: data }, c: 'bye ack'},
+      id: 999,
+      nsp: '/deep'
+    };
+    helpers.test_bin(packet);
+  })
+
+});
diff --git a/test/buffer.js b/test/buffer.js
new file mode 100644
index 0000000..dd27460
--- /dev/null
+++ b/test/buffer.js
@@ -0,0 +1,25 @@
+var parser = require('../index.js');
+var expect = require('expect.js');
+var helpers = require('./helpers.js');
+var encode = parser.encode;
+var decode = parser.decode;
+
+describe('parser', function() {
+  it('encodes a Buffer', function() {
+      helpers.test_bin({
+        type: parser.BINARY_EVENT,
+        data: new Buffer('abc', 'utf8'),
+        id: 23,
+        nsp: '/cool'
+      });
+  });
+
+  it('encodes a binary ack with Buffer', function() {
+    helpers.test_bin({
+      type: parser.BINARY_ACK,
+      data: ['a', new Buffer('xxx', 'utf8'), {}],
+      id: 127,
+      nsp: '/back'
+    })
+  });
+});
diff --git a/test/helpers.js b/test/helpers.js
new file mode 100644
index 0000000..e6a754c
--- /dev/null
+++ b/test/helpers.js
@@ -0,0 +1,46 @@
+var parser = require('../index.js');
+var expect = require('expect.js');
+var encoder = new parser.Encoder();
+
+// tests encoding and decoding a single packet
+module.exports.test = function(obj){
+  encoder.encode(obj, function(encodedPackets) {
+    var decoder = new parser.Decoder();
+    decoder.on('decoded', function(packet) {
+      expect(packet).to.eql(obj);
+    });
+
+    decoder.add(encodedPackets[0]);
+  });
+}
+
+// tests encoding of binary packets
+module.exports.test_bin = function test_bin(obj) {
+  var originalData = obj.data;
+  encoder.encode(obj, function(encodedPackets) {
+    var decoder = new parser.Decoder();
+    decoder.on('decoded', function(packet) {
+      obj.data = originalData;
+      obj.attachments = undefined;
+      expect(obj).to.eql(packet);
+    });
+
+    for (var i = 0; i < encodedPackets.length; i++) {
+      decoder.add(encodedPackets[i]);
+    }
+  });
+}
+
+// array buffer's slice is native code that is not transported across
+// socket.io via msgpack, so regular .eql fails
+module.exports.testArrayBuffers = function(buf1, buf2) {
+   buf1.slice = undefined;
+   buf2.slice = undefined;
+   expect(buf1).to.eql(buf2);
+}
+
+module.exports.testPacketMetadata = function(p1, p2) {
+  expect(p1.type).to.eql(p2.type);
+  expect(p1.id).to.eql(p2.id);
+  expect(p1.nsp).to.eql(p2.nsp);
+}
diff --git a/test/index.js b/test/index.js
new file mode 100644
index 0000000..ba6ce75
--- /dev/null
+++ b/test/index.js
@@ -0,0 +1,30 @@
+var env = require('./support/env.js');
+
+var blobSupported = (function() {
+  try {
+    new Blob(['hi']);
+    return true;
+  } catch(e) {}
+  return false;
+})();
+
+/**
+ * Create a blob builder even when vendor prefixes exist
+ */
+
+var BlobBuilder = global.BlobBuilder || global.WebKitBlobBuilder || global.MSBlobBuilder || global.MozBlobBuilder;
+var blobBuilderSupported = !!BlobBuilder && !!BlobBuilder.prototype.append && !!BlobBuilder.prototype.getBlob;
+
+require('./parser.js');
+
+if (!env.browser) {
+  require('./buffer.js');
+}
+
+if (global.ArrayBuffer) {
+  require('./arraybuffer.js');
+}
+
+if (blobSupported || blobBuilderSupported) {
+  require('./blob.js');
+}
diff --git a/test/parser.js b/test/parser.js
new file mode 100644
index 0000000..346ca3b
--- /dev/null
+++ b/test/parser.js
@@ -0,0 +1,62 @@
+var parser = require('../index.js');
+var expect = require('expect.js');
+var helpers = require('./helpers.js');
+var encode = parser.encode;
+var decode = parser.decode;
+
+describe('parser', function(){
+
+  it('exposes types', function(){
+    expect(parser.CONNECT).to.be.a('number');
+    expect(parser.DISCONNECT).to.be.a('number');
+    expect(parser.EVENT).to.be.a('number');
+    expect(parser.ACK).to.be.a('number');
+    expect(parser.ERROR).to.be.a('number');
+  });
+
+  it('encodes connection', function(){
+    helpers.test({
+      type: parser.CONNECT,
+      nsp: '/woot'
+    });
+  });
+
+  it('encodes disconnection', function(){
+    helpers.test({
+      type: parser.DISCONNECT,
+      nsp: '/woot'
+    });
+  });
+
+  it('encodes an event', function(){
+    helpers.test({
+      type: parser.EVENT,
+      data: ['a', 1, {}],
+      nsp: '/'
+    });
+    helpers.test({
+      type: parser.EVENT,
+      data: ['a', 1, {}],
+      id: 1,
+      nsp: '/test'
+    });
+  });
+
+  it('encodes an ack', function(){
+    helpers.test({
+      type: parser.ACK,
+      data: ['a', 1, {}],
+      id: 123,
+      nsp: '/'
+    });
+  });
+
+  it('decodes a bad binary packet', function(){
+    try {
+      var decoder = new parser.Decoder();
+      decoder.add('5');
+    } catch(e){
+      expect(e.message).to.match(/Illegal/);
+    }
+  });
+});
diff --git a/test/support/env.js b/test/support/env.js
new file mode 100644
index 0000000..c1d494e
--- /dev/null
+++ b/test/support/env.js
@@ -0,0 +1,5 @@
+// WARNING this is bad practice
+// we only do this in our tests because we need to test engine.io-client
+// support in browsers and in node.js
+// some tests do not yet work in both
+module.exports.browser = !!global.window;

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-socket.io-parser.git


