[Pkg-javascript-commits] [node-compression] 01/02: Imported Upstream version 1.0.7
Leo Iannacone
l3on-guest@moszumanska.debian.org
Sat Jun 14 15:49:52 UTC 2014
This is an automated email from the git hooks/post-receive script.
l3on-guest pushed a commit to branch master
in repository node-compression.
commit a649734c869a7c0d89786ffef4a29197153d1cd7
Author: Leo Iannacone <l3on@ubuntu.com>
Date: Sat Jun 14 17:33:14 2014 +0200
Imported Upstream version 1.0.7
---
.npmignore | 3 +
.travis.yml | 11 ++
HISTORY.md | 47 ++++++
README.md | 58 +++++++
index.js | 199 ++++++++++++++++++++++++
package.json | 32 ++++
test/test.js | 494 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
7 files changed, 844 insertions(+)
diff --git a/.npmignore b/.npmignore
new file mode 100644
index 0000000..cd39b77
--- /dev/null
+++ b/.npmignore
@@ -0,0 +1,3 @@
+coverage/
+test/
+.travis.yml
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..1ff243c
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,11 @@
+language: node_js
+node_js:
+  - "0.8"
+  - "0.10"
+  - "0.11"
+matrix:
+  allow_failures:
+    - node_js: "0.11"
+  fast_finish: true
+script: "npm run-script test-travis"
+after_script: "npm install coveralls@2.10.0 && cat ./coverage/lcov.info | coveralls"
diff --git a/HISTORY.md b/HISTORY.md
new file mode 100644
index 0000000..f9b7667
--- /dev/null
+++ b/HISTORY.md
@@ -0,0 +1,47 @@
+1.0.7 / 2014-06-11
+==================
+
+ * use vary module for better `Vary` behavior
+ * deps: accepts@1.0.3
+ * deps: compressible@1.1.0
+
+1.0.6 / 2014-06-03
+==================
+
+ * fix regression when negotiation fails
+
+1.0.5 / 2014-06-03
+==================
+
+ * fix listeners for delayed stream creation
+ - fixes regression for certain `stream.pipe(res)` situations
+
+1.0.4 / 2014-06-03
+==================
+
+ * fix adding `Vary` when value stored as array
+ * fix back-pressure behavior
+ * fix length check for `res.end`
+
+1.0.3 / 2014-05-29
+==================
+
+ * use `accepts` for negotiation
+ * use `on-headers` to handle header checking
+ * deps: bytes@1.0.0
+
+1.0.2 / 2014-04-29
+==================
+
+ * only version compatible with node.js 0.8
+ * support headers given to `res.writeHead`
+ * deps: bytes@0.3.0
+ * deps: negotiator@0.4.3
+
+1.0.1 / 2014-03-08
+==================
+
+ * bump negotiator
+ * use compressible
+ * use .headersSent (drops 0.8 support)
+ * handle identity;q=0 case
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..5a1b6bb
--- /dev/null
+++ b/README.md
@@ -0,0 +1,58 @@
+# compression
+
+[![NPM version](https://badge.fury.io/js/compression.svg)](http://badge.fury.io/js/compression)
+[![Build Status](https://travis-ci.org/expressjs/compression.svg?branch=master)](https://travis-ci.org/expressjs/compression)
+[![Coverage Status](https://img.shields.io/coveralls/expressjs/compression.svg?branch=master)](https://coveralls.io/r/expressjs/compression)
+
+Node.js compression middleware.
+
+## API
+
+```js
+var express = require('express')
+var compression = require('compression')
+
+var app = express()
+app.use(compression())
+```
+
+### compression(options)
+
+Returns the compression middleware using the given `options`.
+
+```js
+app.use(compression({
+ threshold: 512
+}))
+```
+
+#### Options
+
+- `threshold` `<1kb>` - response is only compressed if the byte size is at or above this threshold.
+- `filter` - a filtering callback function. Uses [Compressible](https://github.com/expressjs/compressible) by default.
+
+In addition to these, [zlib](http://nodejs.org/api/zlib.html) options may be passed in to the options object.
+
+## License
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Jonathan Ong me@jongleberry.com
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
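
The README's `filter` option above takes a `(req, res)` callback, and any remaining options are forwarded to zlib. A minimal sketch combining the two, mirroring the default Content-Type check in index.js (the `X-No-Compression` header and the `shouldCompress` name are illustrative, not part of the upstream API):

```js
var express = require('express')
var compression = require('compression')
var compressible = require('compressible')

var app = express()

// Hypothetical opt-out filter: skip compression when the handler sets a
// custom header, otherwise defer to the same content-type check the
// default filter uses.
function shouldCompress(req, res) {
  if (res.getHeader('X-No-Compression')) return false
  return compressible(res.getHeader('Content-Type'))
}

app.use(compression({
  threshold: '1kb',       // strings are parsed by the bytes module
  filter: shouldCompress, // replaces the default Content-Type filter
  level: 6                // remaining options are passed through to zlib
}))
```
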
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..5106de6
--- /dev/null
+++ b/index.js
@@ -0,0 +1,199 @@
+/*!
+ * compression
+ * Copyright(c) 2010 Sencha Inc.
+ * Copyright(c) 2011 TJ Holowaychuk
+ * MIT Licensed
+ */
+
+/**
+ * Module dependencies.
+ */
+
+var zlib = require('zlib');
+var accepts = require('accepts');
+var bytes = require('bytes');
+var onHeaders = require('on-headers');
+var compressible = require('compressible');
+var vary = require('vary');
+
+/**
+ * Supported content-encoding methods.
+ */
+
+exports.methods = {
+ gzip: zlib.createGzip
+ , deflate: zlib.createDeflate
+};
+
+/**
+ * Default filter function.
+ */
+
+exports.filter = function(req, res){
+ return compressible(res.getHeader('Content-Type'));
+};
+
+/**
+ * Compress response data with gzip / deflate.
+ *
+ * See README.md for documentation of options.
+ *
+ * @param {Object} options
+ * @return {Function} middleware
+ * @api public
+ */
+
+module.exports = function compression(options) {
+ options = options || {};
+ var filter = options.filter || exports.filter;
+ var threshold;
+
+ if (false === options.threshold || 0 === options.threshold) {
+ threshold = 0
+ } else if ('string' === typeof options.threshold) {
+ threshold = bytes(options.threshold)
+ } else {
+ threshold = options.threshold || 1024
+ }
+
+ return function compression(req, res, next){
+ var compress = true
+ var listeners = []
+ var write = res.write
+ var on = res.on
+ var end = res.end
+ var stream
+
+ // see #8
+ req.on('close', function(){
+ res.write = res.end = function(){};
+ });
+
+ // flush is noop by default
+ res.flush = noop;
+
+ // proxy
+
+ res.write = function(chunk, encoding){
+ if (!this._header) {
+ // if content-length is set and is lower
+ // than the threshold, don't compress
+ var length = res.getHeader('content-length');
+ if (!isNaN(length) && length < threshold) compress = false;
+ this._implicitHeader();
+ }
+ return stream
+ ? stream.write(new Buffer(chunk, encoding))
+ : write.call(res, chunk, encoding);
+ };
+
+ res.end = function(chunk, encoding){
+ var len
+
+ if (chunk) {
+ len = Buffer.isBuffer(chunk)
+ ? chunk.length
+ : Buffer.byteLength(chunk, encoding)
+ }
+
+ if (!this._header) {
+ compress = len && len >= threshold
+ }
+
+ if (chunk) {
+ this.write(chunk, encoding);
+ }
+
+ return stream
+ ? stream.end()
+ : end.call(res);
+ };
+
+ res.on = function(type, listener){
+ if (!listeners || type !== 'drain') {
+ return on.call(this, type, listener)
+ }
+
+ if (stream) {
+ return stream.on(type, listener)
+ }
+
+ // buffer listeners for future stream
+ listeners.push([type, listener])
+
+ return this
+ }
+
+ function nocompress(){
+ addListeners(res, on, listeners)
+ listeners = null
+ }
+
+ onHeaders(res, function(){
+ // default request filter
+ if (!filter(req, res)) return nocompress()
+
+ // vary
+ vary(res, 'Accept-Encoding')
+
+ if (!compress) return nocompress()
+
+ var encoding = res.getHeader('Content-Encoding') || 'identity';
+
+ // already encoded
+ if ('identity' !== encoding) return nocompress()
+
+ // head
+ if ('HEAD' === req.method) return nocompress()
+
+ // compression method
+ var accept = accepts(req);
+ var method = accept.encodings(['gzip', 'deflate', 'identity']);
+
+ // negotiation failed
+ if (!method || method === 'identity') return nocompress()
+
+ // compression stream
+ stream = exports.methods[method](options);
+ addListeners(stream, stream.on, listeners)
+
+ // overwrite the flush method
+ res.flush = function(){
+ stream.flush();
+ }
+
+ // header fields
+ res.setHeader('Content-Encoding', method);
+ res.removeHeader('Content-Length');
+
+ // compression
+ stream.on('data', function(chunk){
+ if (write.call(res, chunk) === false) {
+ stream.pause()
+ }
+ });
+
+ stream.on('end', function(){
+ end.call(res);
+ });
+
+ on.call(res, 'drain', function() {
+ stream.resume()
+ });
+ });
+
+ next();
+ };
+};
+
+/**
+ * Add buffered listeners to stream
+ */
+
+function addListeners(stream, on, listeners) {
+ for (var i = 0; i < listeners.length; i++) {
+ on.apply(stream, listeners[i])
+ }
+}
+
+function noop(){}
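
index.js proxies `res.flush()` to the underlying zlib stream once a compression stream is active; the tests below rely on this for streaming responses. A rough sketch of using the middleware with a plain `http` server, following the same wiring as the `createServer` helper in test/test.js (the port and timeout values are arbitrary):

```js
var http = require('http')
var compression = require('compression')

var middleware = compression({ threshold: 0 })

var server = http.createServer(function (req, res) {
  // Same wiring as the createServer helper in test/test.js.
  middleware(req, res, function (err) {
    if (err) {
      res.statusCode = err.status || 500
      res.end(err.message)
      return
    }

    // Compression only engages when the client sent Accept-Encoding: gzip or deflate.
    res.setHeader('Content-Type', 'text/plain')
    res.write('partial chunk\n')
    res.flush() // push what zlib has buffered so far to the client

    setTimeout(function () {
      res.end('final chunk\n')
    }, 100)
  })
})

server.listen(3000)
```
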
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..2e4e81a
--- /dev/null
+++ b/package.json
@@ -0,0 +1,32 @@
+{
+ "name": "compression",
+ "description": "Compression middleware for connect and node.js",
+ "version": "1.0.7",
+ "author": "Jonathan Ong <me at jongleberry.com> (http://jongleberry.com)",
+ "contributors": [
+ "Douglas Christopher Wilson <doug at somethingdoug.com>"
+ ],
+ "license": "MIT",
+ "repository": "expressjs/compression",
+ "dependencies": {
+ "accepts": "1.0.3",
+ "bytes": "1.0.0",
+ "compressible": "1.1.0",
+ "on-headers": "0.0.0",
+ "vary": "0.1.0"
+ },
+ "devDependencies": {
+ "istanbul": "0.2.10",
+ "mocha": "~1.20.1",
+ "supertest": "~0.13.0",
+ "should": "~4.0.1"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ },
+ "scripts": {
+ "test": "mocha --check-leaks --reporter dot",
+ "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --check-leaks --reporter dot",
+ "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --check-leaks --reporter spec"
+ }
+}
diff --git a/test/test.js b/test/test.js
new file mode 100644
index 0000000..0aab512
--- /dev/null
+++ b/test/test.js
@@ -0,0 +1,494 @@
+var crypto = require('crypto');
+var http = require('http');
+var request = require('supertest');
+var should = require('should');
+
+var compress = require('..');
+
+describe('compress()', function(){
+ it('should gzip files', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Content-Encoding', 'gzip', done)
+ })
+
+ it('should skip HEAD', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .head('/')
+ .set('Accept-Encoding', 'gzip')
+ .end(function (err, res) {
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-encoding')
+ done()
+ })
+ })
+
+ it('should skip unknown accept-encoding', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'bogus')
+ .end(function (err, res) {
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-encoding')
+ done()
+ })
+ })
+
+ it('should skip if content-encoding already set', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Encoding', 'x-custom')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Content-Encoding', 'x-custom')
+ .expect(200, 'hello, world', done)
+ })
+
+ it('should set Vary', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Content-Encoding', 'gzip')
+ .expect('Vary', 'Accept-Encoding', done)
+ })
+
+ it('should set Vary even if Accept-Encoding is not set', function(done){
+ var server = createServer({ threshold: 1000 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .expect('Vary', 'Accept-Encoding')
+ .end(function (err, res) {
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-encoding')
+ done()
+ })
+ })
+
+ it('should not set Vary if Content-Type does not pass filter', function(done){
+ var server = createServer(null, function (req, res) {
+ res.setHeader('Content-Type', 'image/jpeg')
+ res.end()
+ })
+
+ request(server)
+ .get('/')
+ .end(function (err, res) {
+ if (err) return done(err)
+ res.headers.should.not.have.property('vary')
+ done()
+ })
+ })
+
+ it('should transfer chunked', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Transfer-Encoding', 'chunked', done)
+ })
+
+ it('should remove Content-Length for chunked', function(done){
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .expect('Content-Encoding', 'gzip')
+ .end(function (err, res) {
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-length')
+ done()
+ })
+ })
+
+ it('should allow writing after close', function(done){
+ // UGH
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.on('close', function () {
+ res.write('hello, ')
+ res.end('world')
+ done()
+ })
+ res.destroy()
+ })
+
+ request(server)
+ .get('/')
+ .end(function(){})
+ })
+
+ it('should back-pressure when compressed', function(done){
+ var buf
+ var client
+ var drained = false
+ var resp
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ resp = res
+ res.on('drain', function(){
+ drained = true
+ })
+ res.setHeader('Content-Type', 'text/plain')
+ res.write('start')
+ pressure()
+ })
+ var wait = 2
+
+ crypto.pseudoRandomBytes(1024 * 128, function(err, chunk){
+ buf = chunk
+ pressure()
+ })
+
+ function complete(){
+ if (--wait !== 0) return
+ drained.should.be.true
+ done()
+ }
+
+ function pressure(){
+ if (!buf || !resp || !client) return
+
+ while (resp.write(buf) !== false) {
+ resp.flush()
+ }
+
+ resp.on('drain', function(){
+ resp.write('end')
+ resp.end()
+ })
+ resp.on('finish', complete)
+ client.resume()
+ }
+
+ request(server)
+ .get('/')
+ .request()
+ .on('response', function (res) {
+ client = res
+ res.headers['content-encoding'].should.equal('gzip')
+ res.pause()
+ res.on('end', complete)
+ pressure()
+ })
+ .end()
+ })
+
+ it('should back-pressure when uncompressed', function(done){
+ var buf
+ var client
+ var drained = false
+ var resp
+ var server = createServer({ filter: function(){ return false } }, function (req, res) {
+ resp = res
+ res.on('drain', function(){
+ drained = true
+ })
+ res.setHeader('Content-Type', 'text/plain')
+ res.write('start')
+ pressure()
+ })
+ var wait = 2
+
+ crypto.pseudoRandomBytes(1024 * 128, function(err, chunk){
+ buf = chunk
+ pressure()
+ })
+
+ function complete(){
+ if (--wait !== 0) return
+ drained.should.be.true
+ done()
+ }
+
+ function pressure(){
+ if (!buf || !resp || !client) return
+
+ while (resp.write(buf) !== false) {
+ resp.flush()
+ }
+
+ resp.on('drain', function(){
+ resp.write('end')
+ resp.end()
+ })
+ resp.on('finish', complete)
+ client.resume()
+ }
+
+ request(server)
+ .get('/')
+ .request()
+ .on('response', function (res) {
+ client = res
+ res.headers.should.not.have.property('content-encoding')
+ res.pause()
+ res.on('end', complete)
+ pressure()
+ })
+ .end()
+ })
+
+ describe('threshold', function(){
+ it('should not compress responses below the threshold size', function(done){
+ var server = createServer({ threshold: '1kb' }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Length', '12')
+ res.end('hello, world')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .end(function(err, res){
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-encoding')
+ done()
+ })
+ })
+
+ it('should compress responses above the threshold size', function(done){
+ var server = createServer({ threshold: '1kb' }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Length', '2048')
+ res.end(new Buffer(2048))
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Content-Encoding', 'gzip', done)
+ })
+
+ it('should compress when streaming without a content-length', function(done){
+ var server = createServer({ threshold: '1kb' }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.write('hello, ')
+ setTimeout(function(){
+ res.end('world')
+ }, 10)
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Content-Encoding', 'gzip', done)
+ })
+
+ it('should not compress when streaming and content-length is lower than threshold', function(done){
+ var server = createServer({ threshold: '1kb' }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Length', '12')
+ res.write('hello, ')
+ setTimeout(function(){
+ res.end('world')
+ }, 10)
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .end(function(err, res){
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-encoding')
+ done()
+ })
+ })
+
+ it('should compress when streaming and content-length is larger than threshold', function(done){
+ var server = createServer({ threshold: '1kb' }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Length', '2048')
+ res.write(new Buffer(1024))
+ setTimeout(function(){
+ res.end(new Buffer(1024))
+ }, 10)
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect('Content-Encoding', 'gzip', done)
+ })
+
+ // res.end(str, encoding) broken in node.js 0.8
+ var run = /^v0\.8\./.test(process.version) ? it.skip : it
+ run('should handle writing hex data', function(done){
+ var server = createServer({ threshold: 6 }, function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('2e2e2e2e', 'hex')
+ })
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .expect(200, '....', function (err, res) {
+ if (err) return done(err)
+ res.headers.should.not.have.property('content-encoding')
+ done()
+ })
+ })
+ })
+
+ describe('res.flush()', function () {
+ it('should always be present', function (done) {
+ var server = createServer(null, function (req, res) {
+ res.statusCode = typeof res.flush === 'function'
+ ? 200
+ : 500
+ res.flush()
+ res.end()
+ })
+
+ request(server)
+ .get('/')
+ .expect(200, done)
+ })
+
+ it('should flush the response', function (done) {
+ var chunks = 0
+ var resp
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ resp = res
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Length', '2048')
+ write()
+ })
+
+ function write() {
+ chunks++
+ if (chunks === 2) return resp.end()
+ if (chunks > 2) return chunks--
+ resp.write(new Buffer(1024))
+ resp.flush()
+ }
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .request()
+ .on('response', function (res) {
+ res.headers['content-encoding'].should.equal('gzip')
+ res.on('data', write)
+ res.on('end', function(){
+ chunks.should.equal(2)
+ done()
+ })
+ })
+ .end()
+ })
+
+ it('should flush small chunks for gzip', function (done) {
+ var chunks = 0
+ var resp
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ resp = res
+ res.setHeader('Content-Type', 'text/plain')
+ write()
+ })
+
+ function write() {
+ chunks++
+ if (chunks === 20) return resp.end()
+ if (chunks > 20) return chunks--
+ resp.write('..')
+ resp.flush()
+ }
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'gzip')
+ .request()
+ .on('response', function (res) {
+ res.headers['content-encoding'].should.equal('gzip')
+ res.on('data', write)
+ res.on('end', function(){
+ chunks.should.equal(20)
+ done()
+ })
+ })
+ .end()
+ })
+
+ it('should flush small chunks for deflate', function (done) {
+ var chunks = 0
+ var resp
+ var server = createServer({ threshold: 0 }, function (req, res) {
+ resp = res
+ res.setHeader('Content-Type', 'text/plain')
+ write()
+ })
+
+ function write() {
+ chunks++
+ if (chunks === 20) return resp.end()
+ if (chunks > 20) return chunks--
+ resp.write('..')
+ resp.flush()
+ }
+
+ request(server)
+ .get('/')
+ .set('Accept-Encoding', 'deflate')
+ .request()
+ .on('response', function (res) {
+ res.headers['content-encoding'].should.equal('deflate')
+ res.on('data', write)
+ res.on('end', function(){
+ chunks.should.equal(20)
+ done()
+ })
+ })
+ .end()
+ })
+ })
+})
+
+function createServer(opts, fn) {
+ var _compress = compress(opts)
+ return http.createServer(function (req, res) {
+ _compress(req, res, function (err) {
+ if (err) {
+ res.statusCode = err.status || 500
+ res.end(err.message)
+ return;
+ }
+
+ fn(req, res)
+ })
+ })
+}
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-compression.git