[Pkg-javascript-commits] [science.js] 66/87: Version 1.9.0: better npm support.

bhuvan krishna bhuvan-guest at moszumanska.debian.org
Thu Dec 8 06:12:01 UTC 2016


This is an automated email from the git hooks/post-receive script.

bhuvan-guest pushed a commit to branch master
in repository science.js.

commit e324ecd309df26e892758d28df83f7c86f4cb306
Author: Jason Davies <jason at jasondavies.com>
Date:   Thu Apr 19 22:28:54 2012 +0100

    Version 1.9.0: better npm support.
    
    All modules are now included in a single file: science.v1.js or
    science.v1.min.js.
    
    Use "npm install science" to install for Node.js.
---
 Makefile                                 |  47 +-
 index.js                                 |   3 +
 package.json                             |   4 +-
 science.js                               |  72 ---
 science.lin.min.js                       |   1 -
 science.min.js                           |   1 -
 science.stats.js                         | 777 ----------------------------
 science.stats.min.js                     |   1 -
 science.lin.js => science.v1.js          | 855 ++++++++++++++++++++++++++++++-
 science.v1.min.js                        |   1 +
 src/core/core.js                         |   2 +-
 src/end.js                               |   2 +-
 src/package.js                           |   6 +-
 test/core/ascending-test.js              |   2 +-
 test/core/expm1-test.js                  |   2 +-
 test/core/hypot-test.js                  |   2 +-
 test/core/quadratic-test.js              |   2 +-
 test/core/zeroes-test.js                 |   2 +-
 test/lin/decompose-test.js               |   4 +-
 test/lin/tridag-test.js                  |   4 +-
 test/stats/bandwidth-test.js             |   4 +-
 test/stats/distance-test.js              |   4 +-
 test/stats/distribution/gaussian-test.js |   4 +-
 test/stats/erf-test.js                   |   4 +-
 test/stats/hcluster-test.js              |   4 +-
 test/stats/iqr-test.js                   |   4 +-
 test/stats/kmeans-test.js                |   4 +-
 test/stats/loess-test.js                 |   4 +-
 test/stats/mean-test.js                  |   4 +-
 test/stats/median-test.js                |   4 +-
 test/stats/mode-test.js                  |   4 +-
 test/stats/phi-test.js                   |   4 +-
 test/stats/variance-test.js              |   4 +-
 33 files changed, 918 insertions(+), 924 deletions(-)
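
As the commit message above notes, the consolidated build is published to npm as "science", and the new index.js simply loads science.v1.js and re-exports the global `science` object it defines. A minimal Node.js usage sketch (the sample data and printed values are illustrative only, not taken from this repository's tests):

    // after: npm install science
    var science = require("science");

    console.log(science.version);                                        // "1.9.0"
    console.log(science.stats.mean([1, 2, 3, 4]));                       // 2.5
    console.log(science.stats.quantiles([1, 2, 3, 4, 5], [0.25, 0.75])); // [2, 4]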

diff --git a/Makefile b/Makefile
index e686c2c..bbdfb58 100644
--- a/Makefile
+++ b/Makefile
@@ -1,18 +1,17 @@
-JS_COMPILER = ./node_modules/uglify-js/bin/uglifyjs
-JS_TESTER = ./node_modules/vows/bin/vows
+NODE_PATH ?= ./node_modules
+JS_COMPILER = $(NODE_PATH)/uglify-js/bin/uglifyjs
+JS_TESTER = $(NODE_PATH)/vows/bin/vows
 
 all: \
-	science.js \
-	science.min.js \
-	science.lin.js \
-	science.lin.min.js \
-	science.stats.js \
-	science.stats.min.js \
+	science.v1.js \
+	science.v1.min.js \
 	package.json
 
-.INTERMEDIATE science.js: \
+.INTERMEDIATE science.v1.js: \
 	src/start.js \
 	science.core.js \
+	science.lin.js \
+	science.stats.js \
 	src/end.js
 
 science.core.js: \
@@ -26,7 +25,6 @@ science.core.js: \
 	src/core/zeroes.js
 
 science.lin.js: \
-	src/start.js \
 	src/lin/lin.js \
 	src/lin/decompose.js \
 	src/lin/cross.js \
@@ -38,11 +36,9 @@ science.lin.js: \
 	src/lin/inverse.js \
 	src/lin/multiply.js \
 	src/lin/transpose.js \
-	src/lin/tridag.js \
-	src/end.js
+	src/lin/tridag.js
 
 science.stats.js: \
-	src/start.js \
 	src/stats/stats.js \
 	src/stats/bandwidth.js \
 	src/stats/distance.js \
@@ -60,8 +56,7 @@ science.stats.js: \
 	src/stats/quantiles.js \
 	src/stats/variance.js \
 	src/stats/distribution.js \
-	src/stats/distribution/gaussian.js \
-	src/end.js
+	src/stats/distribution/gaussian.js
 
 test: all
 	@$(JS_TESTER)
@@ -70,19 +65,19 @@ test: all
 	@rm -f $@
 	$(JS_COMPILER) < $< > $@
 
-package.json: science.js src/package.js
-	node src/package.js > $@
-
-science.js science%.js: Makefile
+science%.js: Makefile
 	@rm -f $@
 	cat $(filter %.js,$^) > $@
+	@chmod a-w $@
 
-%.test: %.js %.out all
-	@/bin/echo -n "test: $* "
-	@node $< > $*.actual
-	@diff -U 3 $*.out $*.actual && rm -f $*.actual \
-		&& echo '\033[1;32mPASS\033[0m' \
-		|| echo test: $* '\033[1;31mFAIL\033[0m'
+install:
+	mkdir -p node_modules
+	npm install
+
+package.json: science.v1.js src/package.js
+	@rm -f $@
+	node src/package.js > $@
+	@chmod a-w $@
 
 clean:
-	rm -f science*.js
+	rm -f science*.js package.json
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..0a672a0
--- /dev/null
+++ b/index.js
@@ -0,0 +1,3 @@
+require("./science.v1");
+
+module.exports = science;
diff --git a/package.json b/package.json
index e88a3e8..aca4eda 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
-  "name": "science.js",
-  "version": "1.8.1",
+  "name": "science",
+  "version": "1.9.0",
   "description": "Scientific and statistical computing in JavaScript.",
   "keywords": [
     "science",
diff --git a/science.js b/science.js
deleted file mode 100644
index 4f88b94..0000000
--- a/science.js
+++ /dev/null
@@ -1,72 +0,0 @@
-(function(){science = {version: "1.8.0"}; // semver
-science.ascending = function(a, b) {
-  return a - b;
-};
-// Euler's constant.
-science.EULER = .5772156649015329;
-// Compute exp(x) - 1 accurately for small x.
-science.expm1 = function(x) {
-  return (x < 1e-5 && x > -1e-5) ? x + .5 * x * x : Math.exp(x) - 1;
-};
-science.functor = function(v) {
-  return typeof v === "function" ? v : function() { return v; };
-};
-// Based on:
-// http://www.johndcook.com/blog/2010/06/02/whats-so-hard-about-finding-a-hypotenuse/
-science.hypot = function(x, y) {
-  x = Math.abs(x);
-  y = Math.abs(y);
-  var max,
-      min;
-  if (x > y) { max = x; min = y; }
-  else       { max = y; min = x; }
-  var r = min / max;
-  return max * Math.sqrt(1 + r * r);
-};
-science.quadratic = function() {
-  var complex = false;
-
-  function quadratic(a, b, c) {
-    var d = b * b - 4 * a * c;
-    if (d > 0) {
-      d = Math.sqrt(d) / (2 * a);
-      return complex
-        ? [{r: -b - d, i: 0}, {r: -b + d, i: 0}]
-        : [-b - d, -b + d];
-    } else if (d === 0) {
-      d = -b / (2 * a);
-      return complex ? [{r: d, i: 0}] : [d];
-    } else {
-      if (complex) {
-        d = Math.sqrt(-d) / (2 * a);
-        return [
-          {r: -b, i: -d},
-          {r: -b, i: d}
-        ];
-      }
-      return [];
-    }
-  }
-
-  quadratic.complex = function(x) {
-    if (!arguments.length) return complex;
-    complex = x;
-    return quadratic;
-  };
-
-  return quadratic;
-};
-// Constructs a multi-dimensional array filled with zeroes.
-science.zeroes = function(n) {
-  var i = -1,
-      a = [];
-  if (arguments.length === 1)
-    while (++i < n)
-      a[i] = 0;
-  else
-    while (++i < n)
-      a[i] = science.zeroes.apply(
-        this, Array.prototype.slice.call(arguments, 1));
-  return a;
-};
-})()
\ No newline at end of file
diff --git a/science.lin.min.js b/science.lin.min.js
deleted file mode 100644
index 0a474d0..0000000
--- a/science.lin.min.js
+++ /dev/null
@@ -1 +0,0 @@
-(function(){function a(a,b,c){var d=c.length;for(var e=0;e<d;e++)a[e]=c[d-1][e];for(var f=d-1;f>0;f--){var g=0,h=0;for(var i=0;i<f;i++)g+=Math.abs(a[i]);if(g===0){b[f]=a[f-1];for(var e=0;e<f;e++)a[e]=c[f-1][e],c[f][e]=0,c[e][f]=0}else{for(var i=0;i<f;i++)a[i]/=g,h+=a[i]*a[i];var j=a[f-1],k=Math.sqrt(h);j>0&&(k=-k),b[f]=g*k,h-=j*k,a[f-1]=j-k;for(var e=0;e<f;e++)b[e]=0;for(var e=0;e<f;e++){j=a[e],c[e][f]=j,k=b[e]+c[e][e]*j;for(var i=e+1;i<=f-1;i++)k+=c[i][e]*a[i],b[i]+=c[i][e]*j;b[e]=k}j=0 [...]
\ No newline at end of file
diff --git a/science.min.js b/science.min.js
deleted file mode 100644
index 3c29d3e..0000000
--- a/science.min.js
+++ /dev/null
@@ -1 +0,0 @@
-(function(){science={version:"1.8.0"},science.ascending=function(a,b){return a-b},science.EULER=.5772156649015329,science.expm1=function(a){return a<1e-5&&a>-0.00001?a+.5*a*a:Math.exp(a)-1},science.functor=function(a){return typeof a=="function"?a:function(){return a}},science.hypot=function(a,b){a=Math.abs(a),b=Math.abs(b);var c,d;a>b?(c=a,d=b):(c=b,d=a);var e=d/c;return c*Math.sqrt(1+e*e)},science.quadratic=function(){function b(b,c,d){var e=c*c-4*b*d;return e>0?(e=Math.sqrt(e)/(2*b),a [...]
\ No newline at end of file
diff --git a/science.stats.js b/science.stats.js
deleted file mode 100644
index 4558453..0000000
--- a/science.stats.js
+++ /dev/null
@@ -1,777 +0,0 @@
-(function(){science.stats = {};
-// Bandwidth selectors for Gaussian kernels.
-// Based on R's implementations in `stats.bw`.
-science.stats.bandwidth = {
-
-  // Silverman, B. W. (1986) Density Estimation. London: Chapman and Hall.
-  nrd0: function(x) {
-    var hi = Math.sqrt(science.stats.variance(x));
-    if (!(lo = Math.min(hi, science.stats.iqr(x) / 1.34)))
-      (lo = hi) || (lo = Math.abs(x[1])) || (lo = 1);
-    return .9 * lo * Math.pow(x.length, -.2);
-  },
-
-  // Scott, D. W. (1992) Multivariate Density Estimation: Theory, Practice, and
-  // Visualization. Wiley.
-  nrd: function(x) {
-    var h = science.stats.iqr(x) / 1.34;
-    return 1.06 * Math.min(Math.sqrt(science.stats.variance(x)), h)
-      * Math.pow(x.length, -1/5);
-  }
-};
-science.stats.distance = {
-  euclidean: function(a, b) {
-    var n = a.length,
-        i = -1,
-        s = 0,
-        x;
-    while (++i < n) {
-      x = a[i] - b[i];
-      s += x * x;
-    }
-    return Math.sqrt(s);
-  },
-  manhattan: function(a, b) {
-    var n = a.length,
-        i = -1,
-        s = 0;
-    while (++i < n) s += Math.abs(a[i] - b[i]);
-    return s;
-  },
-  minkowski: function(p) {
-    return function(a, b) {
-      var n = a.length,
-          i = -1,
-          s = 0;
-      while (++i < n) s += Math.pow(Math.abs(a[i] - b[i]), p);
-      return Math.pow(s, 1 / p);
-    };
-  },
-  chebyshev: function(a, b) {
-    var n = a.length,
-        i = -1,
-        max = 0,
-        x;
-    while (++i < n) {
-      x = Math.abs(a[i] - b[i]);
-      if (x > max) max = x;
-    }
-    return max;
-  },
-  hamming: function(a, b) {
-    var n = a.length,
-        i = -1,
-        d = 0;
-    while (++i < n) if (a[i] !== b[i]) d++;
-    return d;
-  },
-  jaccard: function(a, b) {
-    var n = a.length,
-        i = -1,
-        s = 0;
-    while (++i < n) if (a[i] === b[i]) s++;
-    return s / n;
-  },
-  braycurtis: function(a, b) {
-    var n = a.length,
-        i = -1,
-        s0 = 0,
-        s1 = 0,
-        ai,
-        bi;
-    while (++i < n) {
-      ai = a[i];
-      bi = b[i];
-      s0 += Math.abs(ai - bi);
-      s1 += Math.abs(ai + bi);
-    }
-    return s0 / s1;
-  }
-};
-// Based on implementation in http://picomath.org/.
-science.stats.erf = function(x) {
-  var a1 =  0.254829592,
-      a2 = -0.284496736,
-      a3 =  1.421413741,
-      a4 = -1.453152027,
-      a5 =  1.061405429,
-      p  =  0.3275911;
-
-  // Save the sign of x
-  var sign = x < 0 ? -1 : 1;
-  if (x < 0) {
-    sign = -1;
-    x = -x;
-  }
-
-  // A&S formula 7.1.26
-  var t = 1 / (1 + p * x);
-  return sign * (
-    1 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1)
-    * t * Math.exp(-x * x));
-};
-science.stats.phi = function(x) {
-  return .5 * (1 + science.stats.erf(x / Math.SQRT2));
-};
-// See <http://en.wikipedia.org/wiki/Kernel_(statistics)>.
-science.stats.kernel = {
-  uniform: function(u) {
-    if (u <= 1 && u >= -1) return .5;
-    return 0;
-  },
-  triangular: function(u) {
-    if (u <= 1 && u >= -1) return 1 - Math.abs(u);
-    return 0;
-  },
-  epanechnikov: function(u) {
-    if (u <= 1 && u >= -1) return .75 * (1 - u * u);
-    return 0;
-  },
-  quartic: function(u) {
-    if (u <= 1 && u >= -1) {
-      var tmp = 1 - u * u;
-      return (15 / 16) * tmp * tmp;
-    }
-    return 0;
-  },
-  triweight: function(u) {
-    if (u <= 1 && u >= -1) {
-      var tmp = 1 - u * u;
-      return (35 / 32) * tmp * tmp * tmp;
-    }
-    return 0;
-  },
-  gaussian: function(u) {
-    return 1 / Math.sqrt(2 * Math.PI) * Math.exp(-.5 * u * u);
-  },
-  cosine: function(u) {
-    if (u <= 1 && u >= -1) return Math.PI / 4 * Math.cos(Math.PI / 2 * u);
-    return 0;
-  }
-};
-// http://exploringdata.net/den_trac.htm
-science.stats.kde = function() {
-  var kernel = science.stats.kernel.gaussian,
-      sample = [],
-      bandwidth = science.stats.bandwidth.nrd;
-
-  function kde(points, i) {
-    var bw = bandwidth.call(this, sample);
-    return points.map(function(x) {
-      var i = -1,
-          y = 0,
-          n = sample.length;
-      while (++i < n) {
-        y += kernel((x - sample[i]) / bw);
-      }
-      return [x, y / bw / n];
-    });
-  }
-
-  kde.kernel = function(x) {
-    if (!arguments.length) return kernel;
-    kernel = x;
-    return kde;
-  };
-
-  kde.sample = function(x) {
-    if (!arguments.length) return sample;
-    sample = x;
-    return kde;
-  };
-
-  kde.bandwidth = function(x) {
-    if (!arguments.length) return bandwidth;
-    bandwidth = science.functor(x);
-    return kde;
-  };
-
-  return kde;
-};
-// Based on figue implementation by Jean-Yves Delort.
-// http://code.google.com/p/figue/
-science.stats.kmeans = function() {
-  var distance = science.stats.distance.euclidean,
-      maxIterations = 1000,
-      k = 1;
-
-  function kmeans(vectors) {
-    var n = vectors.length,
-        assignments = [],
-        clusterSizes = [],
-        repeat = 1,
-        iterations = 0,
-        centroids = science_stats_kmeansRandom(k, vectors),
-        newCentroids,
-        i,
-        j,
-        x,
-        d,
-        min,
-        best;
-
-    while (repeat && iterations < maxIterations) {
-      // Assignment step.
-      j = -1; while (++j < k) {
-        clusterSizes[j] = 0;
-      }
-
-      i = -1; while (++i < n) {
-        x = vectors[i];
-        min = Infinity;
-        j = -1; while (++j < k) {
-          d = distance.call(this, centroids[j], x);
-          if (d < min) {
-            min = d;
-            best = j;
-          }
-        }
-        clusterSizes[assignments[i] = best]++;
-      }
-
-      // Update centroids step.
-      newCentroids = [];
-      i = -1; while (++i < n) {
-        x = assignments[i];
-        d = newCentroids[x];
-        if (d == null) newCentroids[x] = vectors[i].slice();
-        else {
-          j = -1; while (++j < d.length) {
-            d[j] += vectors[i][j];
-          }
-        }
-      }
-      j = -1; while (++j < k) {
-        x = newCentroids[j];
-        d = 1 / clusterSizes[j];
-        i = -1; while (++i < x.length) x[i] *= d;
-      }
-
-      // Check convergence.
-      repeat = 0;
-      j = -1; while (++j < k) {
-        if (!science_stats_kmeansCompare(newCentroids[j], centroids[j])) {
-          repeat = 1;
-          break;
-        }
-      }
-      centroids = newCentroids;
-      iterations++;
-    }
-    return {assignments: assignments, centroids: centroids};
-  }
-
-  kmeans.k = function(x) {
-    if (!arguments.length) return k;
-    k = x;
-    return kmeans;
-  };
-
-  kmeans.distance = function(x) {
-    if (!arguments.length) return distance;
-    distance = x;
-    return kmeans;
-  };
-
-  return kmeans;
-};
-
-function science_stats_kmeansCompare(a, b) {
-  if (!a || !b || a.length !== b.length) return false;
-  var n = a.length,
-      i = -1;
-  while (++i < n) if (a[i] !== b[i]) return false;
-  return true;
-}
-
-// Returns an array of k distinct vectors randomly selected from the input
-// array of vectors. Returns null if k > n or if there are less than k distinct
-// objects in vectors.
-function science_stats_kmeansRandom(k, vectors) {
-  var n = vectors.length;
-  if (k > n) return null;
-  
-  var selected_vectors = [];
-  var selected_indices = [];
-  var tested_indices = {};
-  var tested = 0;
-  var selected = 0;
-  var i,
-      vector,
-      select;
-
-  while (selected < k) {
-    if (tested === n) return null;
-    
-    var random_index = Math.floor(Math.random() * n);
-    if (random_index in tested_indices) continue;
-    
-    tested_indices[random_index] = 1;
-    tested++;
-    vector = vectors[random_index];
-    select = true;
-    for (i = 0; i < selected; i++) {
-      if (science_stats_kmeansCompare(vector, selected_vectors[i])) {
-        select = false;
-        break;
-      }
-    }
-    if (select) {
-      selected_vectors[selected] = vector;
-      selected_indices[selected] = random_index;
-      selected++;
-    }
-  }
-  return selected_vectors;
-}
-science.stats.hcluster = function() {
-  var distance = science.stats.distance.euclidean,
-      linkage = "simple"; // simple, complete or average
-
-  function hcluster(vectors) {
-    var n = vectors.length,
-        dMin = [],
-        cSize = [],
-        distMatrix = [],
-        clusters = [],
-        c1,
-        c2,
-        c1Cluster,
-        c2Cluster,
-        p,
-        root,
-        i,
-        j;
-
-    // Initialise distance matrix and vector of closest clusters.
-    i = -1; while (++i < n) {
-      dMin[i] = 0;
-      distMatrix[i] = [];
-      j = -1; while (++j < n) {
-        distMatrix[i][j] = i === j ? Infinity : distance(vectors[i] , vectors[j]);
-        if (distMatrix[i][dMin[i]] > distMatrix[i][j]) dMin[i] = j;
-      }
-    }
-
-    // create leaves of the tree
-    i = -1; while (++i < n) {
-      clusters[i] = [];
-      clusters[i][0] = {
-        left: null,
-        right: null,
-        dist: 0,
-        centroid: vectors[i],
-        size: 1,
-        depth: 0
-      };
-      cSize[i] = 1;
-    }
-
-    // Main loop
-    for (p = 0; p < n-1; p++) {
-      // find the closest pair of clusters
-      c1 = 0;
-      for (i = 0; i < n; i++) {
-        if (distMatrix[i][dMin[i]] < distMatrix[c1][dMin[c1]]) c1 = i;
-      }
-      c2 = dMin[c1];
-
-      // create node to store cluster info 
-      c1Cluster = clusters[c1][0];
-      c2Cluster = clusters[c2][0];
-
-      newCluster = {
-        left: c1Cluster,
-        right: c2Cluster,
-        dist: distMatrix[c1][c2],
-        centroid: calculateCentroid(c1Cluster.size, c1Cluster.centroid,
-          c2Cluster.size, c2Cluster.centroid),
-        size: c1Cluster.size + c2Cluster.size,
-        depth: 1 + Math.max(c1Cluster.depth, c2Cluster.depth)
-      };
-      clusters[c1].splice(0, 0, newCluster);
-      cSize[c1] += cSize[c2];
-
-      // overwrite row c1 with respect to the linkage type
-      for (j = 0; j < n; j++) {
-        switch (linkage) {
-          case "single":
-            if (distMatrix[c1][j] > distMatrix[c2][j])
-              distMatrix[j][c1] = distMatrix[c1][j] = distMatrix[c2][j];
-            break;
-          case "complete":
-            if (distMatrix[c1][j] < distMatrix[c2][j])
-              distMatrix[j][c1] = distMatrix[c1][j] = distMatrix[c2][j];
-            break;
-          case "average":
-            distMatrix[j][c1] = distMatrix[c1][j] = (cSize[c1] * distMatrix[c1][j] + cSize[c2] * distMatrix[c2][j]) / (cSize[c1] + cSize[j]);
-            break;
-        }
-      }
-      distMatrix[c1][c1] = Infinity;
-
-      // infinity ­out old row c2 and column c2
-      for (i = 0; i < n; i++)
-        distMatrix[i][c2] = distMatrix[c2][i] = Infinity;
-
-      // update dmin and replace ones that previous pointed to c2 to point to c1
-      for (j = 0; j < n; j++) {
-        if (dMin[j] == c2) dMin[j] = c1;
-        if (distMatrix[c1][j] < distMatrix[c1][dMin[c1]]) dMin[c1] = j;
-      }
-
-      // keep track of the last added cluster
-      root = newCluster;
-    }
-
-    return root;
-  }
-
-  hcluster.distance = function(x) {
-    if (!arguments.length) return distance;
-    distance = x;
-    return hcluster;
-  };
-
-  return hcluster;
-};
-
-function calculateCentroid(c1Size, c1Centroid, c2Size, c2Centroid) {
-  var newCentroid = [],
-      newSize = c1Size + c2Size,
-      n = c1Centroid.length,
-      i = -1;
-  while (++i < n) {
-    newCentroid[i] = (c1Size * c1Centroid[i] + c2Size * c2Centroid[i]) / newSize;
-  }
-  return newCentroid;
-}
-science.stats.iqr = function(x) {
-  var quartiles = science.stats.quantiles(x, [.25, .75]);
-  return quartiles[1] - quartiles[0];
-};
-// Based on org.apache.commons.math.analysis.interpolation.LoessInterpolator
-// from http://commons.apache.org/math/
-science.stats.loess = function() {    
-  var bandwidth = .3,
-      robustnessIters = 2,
-      accuracy = 1e-12;
-
-  function smooth(xval, yval, weights) {
-    var n = xval.length,
-        i;
-
-    if (n !== yval.length) throw {error: "Mismatched array lengths"};
-    if (n == 0) throw {error: "At least one point required."};
-
-    if (arguments.length < 3) {
-      weights = [];
-      i = -1; while (++i < n) weights[i] = 1;
-    }
-
-    science_stats_loessFiniteReal(xval);
-    science_stats_loessFiniteReal(yval);
-    science_stats_loessFiniteReal(weights);
-    science_stats_loessStrictlyIncreasing(xval);
-
-    if (n == 1) return [yval[0]];
-    if (n == 2) return [yval[0], yval[1]];
-
-    var bandwidthInPoints = Math.floor(bandwidth * n);
-
-    if (bandwidthInPoints < 2) throw {error: "Bandwidth too small."};
-
-    var res = [],
-        residuals = [],
-        robustnessWeights = [];
-
-    // Do an initial fit and 'robustnessIters' robustness iterations.
-    // This is equivalent to doing 'robustnessIters+1' robustness iterations
-    // starting with all robustness weights set to 1.
-    i = -1; while (++i < n) {
-      res[i] = 0;
-      residuals[i] = 0;
-      robustnessWeights[i] = 1;
-    }
-
-    var iter = -1;
-    while (++iter <= robustnessIters) {
-      var bandwidthInterval = [0, bandwidthInPoints - 1];
-      // At each x, compute a local weighted linear regression
-      var x;
-      i = -1; while (++i < n) {
-        x = xval[i];
-
-        // Find out the interval of source points on which
-        // a regression is to be made.
-        if (i > 0) {
-          science_stats_loessUpdateBandwidthInterval(xval, weights, i, bandwidthInterval);
-        }
-
-        var ileft = bandwidthInterval[0],
-            iright = bandwidthInterval[1];
-
-        // Compute the point of the bandwidth interval that is
-        // farthest from x
-        var edge = (xval[i] - xval[ileft]) > (xval[iright] - xval[i]) ? ileft : iright;
-
-        // Compute a least-squares linear fit weighted by
-        // the product of robustness weights and the tricube
-        // weight function.
-        // See http://en.wikipedia.org/wiki/Linear_regression
-        // (section "Univariate linear case")
-        // and http://en.wikipedia.org/wiki/Weighted_least_squares
-        // (section "Weighted least squares")
-        var sumWeights = 0,
-            sumX = 0,
-            sumXSquared = 0,
-            sumY = 0,
-            sumXY = 0,
-            denom = Math.abs(1 / (xval[edge] - x));
-
-        for (var k = ileft; k <= iright; ++k) {
-          var xk   = xval[k],
-              yk   = yval[k],
-              dist = k < i ? x - xk : xk - x,
-              w    = science_stats_loessTricube(dist * denom) * robustnessWeights[k] * weights[k],
-              xkw  = xk * w;
-          sumWeights += w;
-          sumX += xkw;
-          sumXSquared += xk * xkw;
-          sumY += yk * w;
-          sumXY += yk * xkw;
-        }
-
-        var meanX = sumX / sumWeights,
-            meanY = sumY / sumWeights,
-            meanXY = sumXY / sumWeights,
-            meanXSquared = sumXSquared / sumWeights;
-
-        var beta = (Math.sqrt(Math.abs(meanXSquared - meanX * meanX)) < accuracy)
-            ? 0 : ((meanXY - meanX * meanY) / (meanXSquared - meanX * meanX));
-
-        var alpha = meanY - beta * meanX;
-
-        res[i] = beta * x + alpha;
-        residuals[i] = Math.abs(yval[i] - res[i]);
-      }
-
-      // No need to recompute the robustness weights at the last
-      // iteration, they won't be needed anymore
-      if (iter === robustnessIters) {
-        break;
-      }
-
-      // Recompute the robustness weights.
-
-      // Find the median residual.
-      var sortedResiduals = residuals.slice();
-      sortedResiduals.sort();
-      var medianResidual = sortedResiduals[Math.floor(n / 2)];
-
-      if (Math.abs(medianResidual) < accuracy)
-        break;
-
-      var arg,
-          w;
-      i = -1; while (++i < n) {
-        arg = residuals[i] / (6 * medianResidual);
-        robustnessWeights[i] = (arg >= 1) ? 0 : ((w = 1 - arg * arg) * w);
-      }
-    }
-
-    return res;
-  }
-
-  smooth.bandwidth = function(x) {
-    if (!arguments.length) return x;
-    bandwidth = x;
-    return smooth;
-  };
-
-  smooth.robustnessIterations = function(x) {
-    if (!arguments.length) return x;
-    robustnessIters = x;
-    return smooth;
-  };
-
-  smooth.accuracy = function(x) {
-    if (!arguments.length) return x;
-    accuracy = x;
-    return smooth;
-  };
-
-  return smooth;
-};
-
-function science_stats_loessFiniteReal(values) {
-  var n = values.length,
-      i = -1;
-
-  while (++i < n) if (!isFinite(values[i])) return false;
-
-  return true;
-}
-
-function science_stats_loessStrictlyIncreasing(xval) {
-  var n = xval.length,
-      i = 0;
-
-  while (++i < n) if (xval[i - 1] >= xval[i]) return false;
-
-  return true;
-}
-
-// Compute the tricube weight function.
-// http://en.wikipedia.org/wiki/Local_regression#Weight_function
-function science_stats_loessTricube(x) {
-  return (x = 1 - x * x * x) * x * x;
-}
-
-// Given an index interval into xval that embraces a certain number of
-// points closest to xval[i-1], update the interval so that it embraces
-// the same number of points closest to xval[i], ignoring zero weights.
-function science_stats_loessUpdateBandwidthInterval(
-  xval, weights, i, bandwidthInterval) {
-
-  var left = bandwidthInterval[0],
-      right = bandwidthInterval[1];
-
-  // The right edge should be adjusted if the next point to the right
-  // is closer to xval[i] than the leftmost point of the current interval
-  var nextRight = science_stats_loessNextNonzero(weights, right);
-  if ((nextRight < xval.length) && (xval[nextRight] - xval[i]) < (xval[i] - xval[left])) {
-    var nextLeft = science_stats_loessNextNonzero(weights, left);
-    bandwidthInterval[0] = nextLeft;
-    bandwidthInterval[1] = nextRight;
-  }
-}
-
-function science_stats_loessNextNonzero(weights, i) {
-  var j = i + 1;
-  while (j < weights.length && weights[j] === 0) j++;
-  return j;
-}
-// Welford's algorithm.
-science.stats.mean = function(x) {
-  var n = x.length;
-  if (n === 0) return NaN;
-  var m = 0,
-      i = -1;
-  while (++i < n) m += (x[i] - m) / (i + 1);
-  return m;
-};
-science.stats.median = function(x) {
-  return science.stats.quantiles(x, [.5])[0];
-};
-science.stats.mode = function(x) {
-  x = x.slice().sort(science.ascending);
-  var mode,
-      n = x.length,
-      i = -1,
-      l = i,
-      last = null,
-      max = 0,
-      tmp,
-      v;
-  while (++i < n) {
-    if ((v = x[i]) !== last) {
-      if ((tmp = i - l) > max) {
-        max = tmp;
-        mode = last;
-      }
-      last = v;
-      l = i;
-    }
-  }
-  return mode;
-};
-// Uses R's quantile algorithm type=7.
-science.stats.quantiles = function(d, quantiles) {
-  d = d.slice().sort(science.ascending);
-  var n_1 = d.length - 1;
-  return quantiles.map(function(q) {
-    if (q === 0) return d[0];
-    else if (q === 1) return d[n_1];
-
-    var index = 1 + q * n_1,
-        lo = Math.floor(index),
-        h = index - lo,
-        a = d[lo - 1];
-
-    return h === 0 ? a : a + h * (d[lo] - a);
-  });
-};
-// Unbiased estimate of a sample's variance.
-// Also known as the sample variance, where the denominator is n - 1.
-science.stats.variance = function(x) {
-  var n = x.length;
-  if (n < 1) return NaN;
-  if (n === 1) return 0;
-  var mean = science.stats.mean(x),
-      i = -1,
-      s = 0;
-  while (++i < n) {
-    var v = x[i] - mean;
-    s += v * v;
-  }
-  return s / (n - 1);
-};
-science.stats.distribution = {
-};
-// From http://www.colingodsey.com/javascript-gaussian-random-number-generator/
-// Uses the Box-Muller Transform.
-science.stats.distribution.gaussian = function() {
-  var random = Math.random,
-      mean = 0,
-      sigma = 1,
-      variance = 1;
-
-  function gaussian() {
-    var x1,
-        x2,
-        rad,
-        y1;
-
-    do {
-      x1 = 2 * random() - 1;
-      x2 = 2 * random() - 1;
-      rad = x1 * x1 + x2 * x2;
-    } while (rad >= 1 || rad === 0);
-
-    return mean + sigma * x1 * Math.sqrt(-2 * Math.log(rad) / rad);
-  }
-
-  gaussian.pdf = function(x) {
-    x = (x - mu) / sigma;
-    return science_stats_distribution_gaussianConstant * Math.exp(-.5 * x * x) / sigma;
-  };
-
-  gaussian.cdf = function(x) {
-    x = (x - mu) / sigma;
-    return .5 * (1 + science.stats.erf(x / Math.SQRT2));
-  };
-
-  gaussian.mean = function(x) {
-    if (!arguments.length) return mean;
-    mean = +x;
-    return gaussian;
-  };
-
-  gaussian.variance = function(x) {
-    if (!arguments.length) return variance;
-    sigma = Math.sqrt(variance = +x);
-    return gaussian;
-  };
-
-  gaussian.random = function(x) {
-    if (!arguments.length) return random;
-    random = x;
-    return gaussian;
-  };
-
-  return gaussian;
-};
-
-science_stats_distribution_gaussianConstant = 1 / Math.sqrt(2 * Math.PI);
-})()
\ No newline at end of file
diff --git a/science.stats.min.js b/science.stats.min.js
deleted file mode 100644
index b446a02..0000000
--- a/science.stats.min.js
+++ /dev/null
@@ -1 +0,0 @@
-(function(){function a(a,b){if(!a||!b||a.length!==b.length)return!1;var c=a.length,d=-1;while(++d<c)if(a[d]!==b[d])return!1;return!0}function b(b,c){var d=c.length;if(b>d)return null;var e=[],f=[],g={},h=0,i=0,j,k,l;while(i<b){if(h===d)return null;var m=Math.floor(Math.random()*d);if(m in g)continue;g[m]=1,h++,k=c[m],l=!0;for(j=0;j<i;j++)if(a(k,e[j])){l=!1;break}l&&(e[i]=k,f[i]=m,i++)}return e}function c(a,b,c,d){var e=[],f=a+c,g=b.length,h=-1;while(++h<g)e[h]=(a*b[h]+c*d[h])/f;return e} [...]
\ No newline at end of file
diff --git a/science.lin.js b/science.v1.js
similarity index 51%
rename from science.lin.js
rename to science.v1.js
index 9e8ce16..97c864b 100644
--- a/science.lin.js
+++ b/science.v1.js
@@ -1,4 +1,75 @@
-(function(){science.lin = {};
+(function(){science = {version: "1.9.0"}; // semver
+science.ascending = function(a, b) {
+  return a - b;
+};
+// Euler's constant.
+science.EULER = .5772156649015329;
+// Compute exp(x) - 1 accurately for small x.
+science.expm1 = function(x) {
+  return (x < 1e-5 && x > -1e-5) ? x + .5 * x * x : Math.exp(x) - 1;
+};
+science.functor = function(v) {
+  return typeof v === "function" ? v : function() { return v; };
+};
+// Based on:
+// http://www.johndcook.com/blog/2010/06/02/whats-so-hard-about-finding-a-hypotenuse/
+science.hypot = function(x, y) {
+  x = Math.abs(x);
+  y = Math.abs(y);
+  var max,
+      min;
+  if (x > y) { max = x; min = y; }
+  else       { max = y; min = x; }
+  var r = min / max;
+  return max * Math.sqrt(1 + r * r);
+};
+science.quadratic = function() {
+  var complex = false;
+
+  function quadratic(a, b, c) {
+    var d = b * b - 4 * a * c;
+    if (d > 0) {
+      d = Math.sqrt(d) / (2 * a);
+      return complex
+        ? [{r: -b - d, i: 0}, {r: -b + d, i: 0}]
+        : [-b - d, -b + d];
+    } else if (d === 0) {
+      d = -b / (2 * a);
+      return complex ? [{r: d, i: 0}] : [d];
+    } else {
+      if (complex) {
+        d = Math.sqrt(-d) / (2 * a);
+        return [
+          {r: -b, i: -d},
+          {r: -b, i: d}
+        ];
+      }
+      return [];
+    }
+  }
+
+  quadratic.complex = function(x) {
+    if (!arguments.length) return complex;
+    complex = x;
+    return quadratic;
+  };
+
+  return quadratic;
+};
+// Constructs a multi-dimensional array filled with zeroes.
+science.zeroes = function(n) {
+  var i = -1,
+      a = [];
+  if (arguments.length === 1)
+    while (++i < n)
+      a[i] = 0;
+  else
+    while (++i < n)
+      a[i] = science.zeroes.apply(
+        this, Array.prototype.slice.call(arguments, 1));
+  return a;
+};
+science.lin = {};
 science.lin.decompose = function() {
 
   function decompose(A) {
@@ -725,7 +796,7 @@ science.lin.dot = function(a, b) {
   return s;
 };
 science.lin.length = function(p) {
-  return Math.sqrt(science.vector.dot(p, p));
+  return Math.sqrt(science.lin.dot(p, p));
 };
 science.lin.normalize = function(p) {
   var length = science.lin.length(p);
@@ -805,7 +876,7 @@ science.lin.gaussjordan = function(m, eps) {
 };
 // Find matrix inverse using Gauss-Jordan.
 science.lin.inverse = function(m) {
-  var n = m.length
+  var n = m.length,
       i = -1;
 
   // Check if the matrix is square.
@@ -885,4 +956,780 @@ science.lin.tridag = function(a, b, c, d, x, n) {
     x[i] = (d[i] - c[i] * x[i + 1]) / b[i];
   }
 };
-})()
\ No newline at end of file
+science.stats = {};
+// Bandwidth selectors for Gaussian kernels.
+// Based on R's implementations in `stats.bw`.
+science.stats.bandwidth = {
+
+  // Silverman, B. W. (1986) Density Estimation. London: Chapman and Hall.
+  nrd0: function(x) {
+    var hi = Math.sqrt(science.stats.variance(x));
+    if (!(lo = Math.min(hi, science.stats.iqr(x) / 1.34)))
+      (lo = hi) || (lo = Math.abs(x[1])) || (lo = 1);
+    return .9 * lo * Math.pow(x.length, -.2);
+  },
+
+  // Scott, D. W. (1992) Multivariate Density Estimation: Theory, Practice, and
+  // Visualization. Wiley.
+  nrd: function(x) {
+    var h = science.stats.iqr(x) / 1.34;
+    return 1.06 * Math.min(Math.sqrt(science.stats.variance(x)), h)
+      * Math.pow(x.length, -1/5);
+  }
+};
+science.stats.distance = {
+  euclidean: function(a, b) {
+    var n = a.length,
+        i = -1,
+        s = 0,
+        x;
+    while (++i < n) {
+      x = a[i] - b[i];
+      s += x * x;
+    }
+    return Math.sqrt(s);
+  },
+  manhattan: function(a, b) {
+    var n = a.length,
+        i = -1,
+        s = 0;
+    while (++i < n) s += Math.abs(a[i] - b[i]);
+    return s;
+  },
+  minkowski: function(p) {
+    return function(a, b) {
+      var n = a.length,
+          i = -1,
+          s = 0;
+      while (++i < n) s += Math.pow(Math.abs(a[i] - b[i]), p);
+      return Math.pow(s, 1 / p);
+    };
+  },
+  chebyshev: function(a, b) {
+    var n = a.length,
+        i = -1,
+        max = 0,
+        x;
+    while (++i < n) {
+      x = Math.abs(a[i] - b[i]);
+      if (x > max) max = x;
+    }
+    return max;
+  },
+  hamming: function(a, b) {
+    var n = a.length,
+        i = -1,
+        d = 0;
+    while (++i < n) if (a[i] !== b[i]) d++;
+    return d;
+  },
+  jaccard: function(a, b) {
+    var n = a.length,
+        i = -1,
+        s = 0;
+    while (++i < n) if (a[i] === b[i]) s++;
+    return s / n;
+  },
+  braycurtis: function(a, b) {
+    var n = a.length,
+        i = -1,
+        s0 = 0,
+        s1 = 0,
+        ai,
+        bi;
+    while (++i < n) {
+      ai = a[i];
+      bi = b[i];
+      s0 += Math.abs(ai - bi);
+      s1 += Math.abs(ai + bi);
+    }
+    return s0 / s1;
+  }
+};
+// Based on implementation in http://picomath.org/.
+science.stats.erf = function(x) {
+  var a1 =  0.254829592,
+      a2 = -0.284496736,
+      a3 =  1.421413741,
+      a4 = -1.453152027,
+      a5 =  1.061405429,
+      p  =  0.3275911;
+
+  // Save the sign of x
+  var sign = x < 0 ? -1 : 1;
+  if (x < 0) {
+    sign = -1;
+    x = -x;
+  }
+
+  // A&S formula 7.1.26
+  var t = 1 / (1 + p * x);
+  return sign * (
+    1 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1)
+    * t * Math.exp(-x * x));
+};
+science.stats.phi = function(x) {
+  return .5 * (1 + science.stats.erf(x / Math.SQRT2));
+};
+// See <http://en.wikipedia.org/wiki/Kernel_(statistics)>.
+science.stats.kernel = {
+  uniform: function(u) {
+    if (u <= 1 && u >= -1) return .5;
+    return 0;
+  },
+  triangular: function(u) {
+    if (u <= 1 && u >= -1) return 1 - Math.abs(u);
+    return 0;
+  },
+  epanechnikov: function(u) {
+    if (u <= 1 && u >= -1) return .75 * (1 - u * u);
+    return 0;
+  },
+  quartic: function(u) {
+    if (u <= 1 && u >= -1) {
+      var tmp = 1 - u * u;
+      return (15 / 16) * tmp * tmp;
+    }
+    return 0;
+  },
+  triweight: function(u) {
+    if (u <= 1 && u >= -1) {
+      var tmp = 1 - u * u;
+      return (35 / 32) * tmp * tmp * tmp;
+    }
+    return 0;
+  },
+  gaussian: function(u) {
+    return 1 / Math.sqrt(2 * Math.PI) * Math.exp(-.5 * u * u);
+  },
+  cosine: function(u) {
+    if (u <= 1 && u >= -1) return Math.PI / 4 * Math.cos(Math.PI / 2 * u);
+    return 0;
+  }
+};
+// http://exploringdata.net/den_trac.htm
+science.stats.kde = function() {
+  var kernel = science.stats.kernel.gaussian,
+      sample = [],
+      bandwidth = science.stats.bandwidth.nrd;
+
+  function kde(points, i) {
+    var bw = bandwidth.call(this, sample);
+    return points.map(function(x) {
+      var i = -1,
+          y = 0,
+          n = sample.length;
+      while (++i < n) {
+        y += kernel((x - sample[i]) / bw);
+      }
+      return [x, y / bw / n];
+    });
+  }
+
+  kde.kernel = function(x) {
+    if (!arguments.length) return kernel;
+    kernel = x;
+    return kde;
+  };
+
+  kde.sample = function(x) {
+    if (!arguments.length) return sample;
+    sample = x;
+    return kde;
+  };
+
+  kde.bandwidth = function(x) {
+    if (!arguments.length) return bandwidth;
+    bandwidth = science.functor(x);
+    return kde;
+  };
+
+  return kde;
+};
+// Based on figue implementation by Jean-Yves Delort.
+// http://code.google.com/p/figue/
+science.stats.kmeans = function() {
+  var distance = science.stats.distance.euclidean,
+      maxIterations = 1000,
+      k = 1;
+
+  function kmeans(vectors) {
+    var n = vectors.length,
+        assignments = [],
+        clusterSizes = [],
+        repeat = 1,
+        iterations = 0,
+        centroids = science_stats_kmeansRandom(k, vectors),
+        newCentroids,
+        i,
+        j,
+        x,
+        d,
+        min,
+        best;
+
+    while (repeat && iterations < maxIterations) {
+      // Assignment step.
+      j = -1; while (++j < k) {
+        clusterSizes[j] = 0;
+      }
+
+      i = -1; while (++i < n) {
+        x = vectors[i];
+        min = Infinity;
+        j = -1; while (++j < k) {
+          d = distance.call(this, centroids[j], x);
+          if (d < min) {
+            min = d;
+            best = j;
+          }
+        }
+        clusterSizes[assignments[i] = best]++;
+      }
+
+      // Update centroids step.
+      newCentroids = [];
+      i = -1; while (++i < n) {
+        x = assignments[i];
+        d = newCentroids[x];
+        if (d == null) newCentroids[x] = vectors[i].slice();
+        else {
+          j = -1; while (++j < d.length) {
+            d[j] += vectors[i][j];
+          }
+        }
+      }
+      j = -1; while (++j < k) {
+        x = newCentroids[j];
+        d = 1 / clusterSizes[j];
+        i = -1; while (++i < x.length) x[i] *= d;
+      }
+
+      // Check convergence.
+      repeat = 0;
+      j = -1; while (++j < k) {
+        if (!science_stats_kmeansCompare(newCentroids[j], centroids[j])) {
+          repeat = 1;
+          break;
+        }
+      }
+      centroids = newCentroids;
+      iterations++;
+    }
+    return {assignments: assignments, centroids: centroids};
+  }
+
+  kmeans.k = function(x) {
+    if (!arguments.length) return k;
+    k = x;
+    return kmeans;
+  };
+
+  kmeans.distance = function(x) {
+    if (!arguments.length) return distance;
+    distance = x;
+    return kmeans;
+  };
+
+  return kmeans;
+};
+
+function science_stats_kmeansCompare(a, b) {
+  if (!a || !b || a.length !== b.length) return false;
+  var n = a.length,
+      i = -1;
+  while (++i < n) if (a[i] !== b[i]) return false;
+  return true;
+}
+
+// Returns an array of k distinct vectors randomly selected from the input
+// array of vectors. Returns null if k > n or if there are less than k distinct
+// objects in vectors.
+function science_stats_kmeansRandom(k, vectors) {
+  var n = vectors.length;
+  if (k > n) return null;
+  
+  var selected_vectors = [];
+  var selected_indices = [];
+  var tested_indices = {};
+  var tested = 0;
+  var selected = 0;
+  var i,
+      vector,
+      select;
+
+  while (selected < k) {
+    if (tested === n) return null;
+    
+    var random_index = Math.floor(Math.random() * n);
+    if (random_index in tested_indices) continue;
+    
+    tested_indices[random_index] = 1;
+    tested++;
+    vector = vectors[random_index];
+    select = true;
+    for (i = 0; i < selected; i++) {
+      if (science_stats_kmeansCompare(vector, selected_vectors[i])) {
+        select = false;
+        break;
+      }
+    }
+    if (select) {
+      selected_vectors[selected] = vector;
+      selected_indices[selected] = random_index;
+      selected++;
+    }
+  }
+  return selected_vectors;
+}
+science.stats.hcluster = function() {
+  var distance = science.stats.distance.euclidean,
+      linkage = "simple"; // simple, complete or average
+
+  function hcluster(vectors) {
+    var n = vectors.length,
+        dMin = [],
+        cSize = [],
+        distMatrix = [],
+        clusters = [],
+        c1,
+        c2,
+        c1Cluster,
+        c2Cluster,
+        p,
+        root,
+        i,
+        j;
+
+    // Initialise distance matrix and vector of closest clusters.
+    i = -1; while (++i < n) {
+      dMin[i] = 0;
+      distMatrix[i] = [];
+      j = -1; while (++j < n) {
+        distMatrix[i][j] = i === j ? Infinity : distance(vectors[i] , vectors[j]);
+        if (distMatrix[i][dMin[i]] > distMatrix[i][j]) dMin[i] = j;
+      }
+    }
+
+    // create leaves of the tree
+    i = -1; while (++i < n) {
+      clusters[i] = [];
+      clusters[i][0] = {
+        left: null,
+        right: null,
+        dist: 0,
+        centroid: vectors[i],
+        size: 1,
+        depth: 0
+      };
+      cSize[i] = 1;
+    }
+
+    // Main loop
+    for (p = 0; p < n-1; p++) {
+      // find the closest pair of clusters
+      c1 = 0;
+      for (i = 0; i < n; i++) {
+        if (distMatrix[i][dMin[i]] < distMatrix[c1][dMin[c1]]) c1 = i;
+      }
+      c2 = dMin[c1];
+
+      // create node to store cluster info 
+      c1Cluster = clusters[c1][0];
+      c2Cluster = clusters[c2][0];
+
+      newCluster = {
+        left: c1Cluster,
+        right: c2Cluster,
+        dist: distMatrix[c1][c2],
+        centroid: calculateCentroid(c1Cluster.size, c1Cluster.centroid,
+          c2Cluster.size, c2Cluster.centroid),
+        size: c1Cluster.size + c2Cluster.size,
+        depth: 1 + Math.max(c1Cluster.depth, c2Cluster.depth)
+      };
+      clusters[c1].splice(0, 0, newCluster);
+      cSize[c1] += cSize[c2];
+
+      // overwrite row c1 with respect to the linkage type
+      for (j = 0; j < n; j++) {
+        switch (linkage) {
+          case "single":
+            if (distMatrix[c1][j] > distMatrix[c2][j])
+              distMatrix[j][c1] = distMatrix[c1][j] = distMatrix[c2][j];
+            break;
+          case "complete":
+            if (distMatrix[c1][j] < distMatrix[c2][j])
+              distMatrix[j][c1] = distMatrix[c1][j] = distMatrix[c2][j];
+            break;
+          case "average":
+            distMatrix[j][c1] = distMatrix[c1][j] = (cSize[c1] * distMatrix[c1][j] + cSize[c2] * distMatrix[c2][j]) / (cSize[c1] + cSize[j]);
+            break;
+        }
+      }
+      distMatrix[c1][c1] = Infinity;
+
+      // infinity ­out old row c2 and column c2
+      for (i = 0; i < n; i++)
+        distMatrix[i][c2] = distMatrix[c2][i] = Infinity;
+
+      // update dmin and replace ones that previous pointed to c2 to point to c1
+      for (j = 0; j < n; j++) {
+        if (dMin[j] == c2) dMin[j] = c1;
+        if (distMatrix[c1][j] < distMatrix[c1][dMin[c1]]) dMin[c1] = j;
+      }
+
+      // keep track of the last added cluster
+      root = newCluster;
+    }
+
+    return root;
+  }
+
+  hcluster.distance = function(x) {
+    if (!arguments.length) return distance;
+    distance = x;
+    return hcluster;
+  };
+
+  return hcluster;
+};
+
+function calculateCentroid(c1Size, c1Centroid, c2Size, c2Centroid) {
+  var newCentroid = [],
+      newSize = c1Size + c2Size,
+      n = c1Centroid.length,
+      i = -1;
+  while (++i < n) {
+    newCentroid[i] = (c1Size * c1Centroid[i] + c2Size * c2Centroid[i]) / newSize;
+  }
+  return newCentroid;
+}
+science.stats.iqr = function(x) {
+  var quartiles = science.stats.quantiles(x, [.25, .75]);
+  return quartiles[1] - quartiles[0];
+};
+// Based on org.apache.commons.math.analysis.interpolation.LoessInterpolator
+// from http://commons.apache.org/math/
+science.stats.loess = function() {    
+  var bandwidth = .3,
+      robustnessIters = 2,
+      accuracy = 1e-12;
+
+  function smooth(xval, yval, weights) {
+    var n = xval.length,
+        i;
+
+    if (n !== yval.length) throw {error: "Mismatched array lengths"};
+    if (n == 0) throw {error: "At least one point required."};
+
+    if (arguments.length < 3) {
+      weights = [];
+      i = -1; while (++i < n) weights[i] = 1;
+    }
+
+    science_stats_loessFiniteReal(xval);
+    science_stats_loessFiniteReal(yval);
+    science_stats_loessFiniteReal(weights);
+    science_stats_loessStrictlyIncreasing(xval);
+
+    if (n == 1) return [yval[0]];
+    if (n == 2) return [yval[0], yval[1]];
+
+    var bandwidthInPoints = Math.floor(bandwidth * n);
+
+    if (bandwidthInPoints < 2) throw {error: "Bandwidth too small."};
+
+    var res = [],
+        residuals = [],
+        robustnessWeights = [];
+
+    // Do an initial fit and 'robustnessIters' robustness iterations.
+    // This is equivalent to doing 'robustnessIters+1' robustness iterations
+    // starting with all robustness weights set to 1.
+    i = -1; while (++i < n) {
+      res[i] = 0;
+      residuals[i] = 0;
+      robustnessWeights[i] = 1;
+    }
+
+    var iter = -1;
+    while (++iter <= robustnessIters) {
+      var bandwidthInterval = [0, bandwidthInPoints - 1];
+      // At each x, compute a local weighted linear regression
+      var x;
+      i = -1; while (++i < n) {
+        x = xval[i];
+
+        // Find out the interval of source points on which
+        // a regression is to be made.
+        if (i > 0) {
+          science_stats_loessUpdateBandwidthInterval(xval, weights, i, bandwidthInterval);
+        }
+
+        var ileft = bandwidthInterval[0],
+            iright = bandwidthInterval[1];
+
+        // Compute the point of the bandwidth interval that is
+        // farthest from x
+        var edge = (xval[i] - xval[ileft]) > (xval[iright] - xval[i]) ? ileft : iright;
+
+        // Compute a least-squares linear fit weighted by
+        // the product of robustness weights and the tricube
+        // weight function.
+        // See http://en.wikipedia.org/wiki/Linear_regression
+        // (section "Univariate linear case")
+        // and http://en.wikipedia.org/wiki/Weighted_least_squares
+        // (section "Weighted least squares")
+        var sumWeights = 0,
+            sumX = 0,
+            sumXSquared = 0,
+            sumY = 0,
+            sumXY = 0,
+            denom = Math.abs(1 / (xval[edge] - x));
+
+        for (var k = ileft; k <= iright; ++k) {
+          var xk   = xval[k],
+              yk   = yval[k],
+              dist = k < i ? x - xk : xk - x,
+              w    = science_stats_loessTricube(dist * denom) * robustnessWeights[k] * weights[k],
+              xkw  = xk * w;
+          sumWeights += w;
+          sumX += xkw;
+          sumXSquared += xk * xkw;
+          sumY += yk * w;
+          sumXY += yk * xkw;
+        }
+
+        var meanX = sumX / sumWeights,
+            meanY = sumY / sumWeights,
+            meanXY = sumXY / sumWeights,
+            meanXSquared = sumXSquared / sumWeights;
+
+        var beta = (Math.sqrt(Math.abs(meanXSquared - meanX * meanX)) < accuracy)
+            ? 0 : ((meanXY - meanX * meanY) / (meanXSquared - meanX * meanX));
+
+        var alpha = meanY - beta * meanX;
+
+        res[i] = beta * x + alpha;
+        residuals[i] = Math.abs(yval[i] - res[i]);
+      }
+
+      // No need to recompute the robustness weights at the last
+      // iteration, they won't be needed anymore
+      if (iter === robustnessIters) {
+        break;
+      }
+
+      // Recompute the robustness weights.
+
+      // Find the median residual.
+      var sortedResiduals = residuals.slice();
+      sortedResiduals.sort();
+      var medianResidual = sortedResiduals[Math.floor(n / 2)];
+
+      if (Math.abs(medianResidual) < accuracy)
+        break;
+
+      var arg,
+          w;
+      i = -1; while (++i < n) {
+        arg = residuals[i] / (6 * medianResidual);
+        robustnessWeights[i] = (arg >= 1) ? 0 : ((w = 1 - arg * arg) * w);
+      }
+    }
+
+    return res;
+  }
+
+  smooth.bandwidth = function(x) {
+    if (!arguments.length) return x;
+    bandwidth = x;
+    return smooth;
+  };
+
+  smooth.robustnessIterations = function(x) {
+    if (!arguments.length) return x;
+    robustnessIters = x;
+    return smooth;
+  };
+
+  smooth.accuracy = function(x) {
+    if (!arguments.length) return x;
+    accuracy = x;
+    return smooth;
+  };
+
+  return smooth;
+};
+
+function science_stats_loessFiniteReal(values) {
+  var n = values.length,
+      i = -1;
+
+  while (++i < n) if (!isFinite(values[i])) return false;
+
+  return true;
+}
+
+function science_stats_loessStrictlyIncreasing(xval) {
+  var n = xval.length,
+      i = 0;
+
+  while (++i < n) if (xval[i - 1] >= xval[i]) return false;
+
+  return true;
+}
+
+// Compute the tricube weight function.
+// http://en.wikipedia.org/wiki/Local_regression#Weight_function
+function science_stats_loessTricube(x) {
+  return (x = 1 - x * x * x) * x * x;
+}
+
+// Given an index interval into xval that embraces a certain number of
+// points closest to xval[i-1], update the interval so that it embraces
+// the same number of points closest to xval[i], ignoring zero weights.
+function science_stats_loessUpdateBandwidthInterval(
+  xval, weights, i, bandwidthInterval) {
+
+  var left = bandwidthInterval[0],
+      right = bandwidthInterval[1];
+
+  // The right edge should be adjusted if the next point to the right
+  // is closer to xval[i] than the leftmost point of the current interval
+  var nextRight = science_stats_loessNextNonzero(weights, right);
+  if ((nextRight < xval.length) && (xval[nextRight] - xval[i]) < (xval[i] - xval[left])) {
+    var nextLeft = science_stats_loessNextNonzero(weights, left);
+    bandwidthInterval[0] = nextLeft;
+    bandwidthInterval[1] = nextRight;
+  }
+}
+
+function science_stats_loessNextNonzero(weights, i) {
+  var j = i + 1;
+  while (j < weights.length && weights[j] === 0) j++;
+  return j;
+}
+// Welford's algorithm.
+science.stats.mean = function(x) {
+  var n = x.length;
+  if (n === 0) return NaN;
+  var m = 0,
+      i = -1;
+  while (++i < n) m += (x[i] - m) / (i + 1);
+  return m;
+};
+science.stats.median = function(x) {
+  return science.stats.quantiles(x, [.5])[0];
+};
+science.stats.mode = function(x) {
+  x = x.slice().sort(science.ascending);
+  var mode,
+      n = x.length,
+      i = -1,
+      l = i,
+      last = null,
+      max = 0,
+      tmp,
+      v;
+  while (++i < n) {
+    if ((v = x[i]) !== last) {
+      if ((tmp = i - l) > max) {
+        max = tmp;
+        mode = last;
+      }
+      last = v;
+      l = i;
+    }
+  }
+  return mode;
+};
+// Uses R's quantile algorithm type=7.
+science.stats.quantiles = function(d, quantiles) {
+  d = d.slice().sort(science.ascending);
+  var n_1 = d.length - 1;
+  return quantiles.map(function(q) {
+    if (q === 0) return d[0];
+    else if (q === 1) return d[n_1];
+
+    var index = 1 + q * n_1,
+        lo = Math.floor(index),
+        h = index - lo,
+        a = d[lo - 1];
+
+    return h === 0 ? a : a + h * (d[lo] - a);
+  });
+};
+// Unbiased estimate of a sample's variance.
+// Also known as the sample variance, where the denominator is n - 1.
+science.stats.variance = function(x) {
+  var n = x.length;
+  if (n < 1) return NaN;
+  if (n === 1) return 0;
+  var mean = science.stats.mean(x),
+      i = -1,
+      s = 0;
+  while (++i < n) {
+    var v = x[i] - mean;
+    s += v * v;
+  }
+  return s / (n - 1);
+};
+science.stats.distribution = {
+};
+// From http://www.colingodsey.com/javascript-gaussian-random-number-generator/
+// Uses the Box-Muller Transform.
+science.stats.distribution.gaussian = function() {
+  var random = Math.random,
+      mean = 0,
+      sigma = 1,
+      variance = 1;
+
+  function gaussian() {
+    var x1,
+        x2,
+        rad,
+        y1;
+
+    do {
+      x1 = 2 * random() - 1;
+      x2 = 2 * random() - 1;
+      rad = x1 * x1 + x2 * x2;
+    } while (rad >= 1 || rad === 0);
+
+    return mean + sigma * x1 * Math.sqrt(-2 * Math.log(rad) / rad);
+  }
+
+  gaussian.pdf = function(x) {
+    x = (x - mu) / sigma;
+    return science_stats_distribution_gaussianConstant * Math.exp(-.5 * x * x) / sigma;
+  };
+
+  gaussian.cdf = function(x) {
+    x = (x - mu) / sigma;
+    return .5 * (1 + science.stats.erf(x / Math.SQRT2));
+  };
+
+  gaussian.mean = function(x) {
+    if (!arguments.length) return mean;
+    mean = +x;
+    return gaussian;
+  };
+
+  gaussian.variance = function(x) {
+    if (!arguments.length) return variance;
+    sigma = Math.sqrt(variance = +x);
+    return gaussian;
+  };
+
+  gaussian.random = function(x) {
+    if (!arguments.length) return random;
+    random = x;
+    return gaussian;
+  };
+
+  return gaussian;
+};
+
+science_stats_distribution_gaussianConstant = 1 / Math.sqrt(2 * Math.PI);
+})();
diff --git a/science.v1.min.js b/science.v1.min.js
new file mode 100644
index 0000000..27d4624
--- /dev/null
+++ b/science.v1.min.js
@@ -0,0 +1 @@
+(function(){function a(a,b,c){var d=c.length;for(var e=0;e<d;e++)a[e]=c[d-1][e];for(var f=d-1;f>0;f--){var g=0,h=0;for(var i=0;i<f;i++)g+=Math.abs(a[i]);if(g===0){b[f]=a[f-1];for(var e=0;e<f;e++)a[e]=c[f-1][e],c[f][e]=0,c[e][f]=0}else{for(var i=0;i<f;i++)a[i]/=g,h+=a[i]*a[i];var j=a[f-1],k=Math.sqrt(h);j>0&&(k=-k),b[f]=g*k,h-=j*k,a[f-1]=j-k;for(var e=0;e<f;e++)b[e]=0;for(var e=0;e<f;e++){j=a[e],c[e][f]=j,k=b[e]+c[e][e]*j;for(var i=e+1;i<=f-1;i++)k+=c[i][e]*a[i],b[i]+=c[i][e]*j;b[e]=k}j=0 [...]
\ No newline at end of file
diff --git a/src/core/core.js b/src/core/core.js
index 10ac4ae..7bc07e0 100644
--- a/src/core/core.js
+++ b/src/core/core.js
@@ -1 +1 @@
-science = {version: "1.8.0"}; // semver
+science = {version: "1.9.0"}; // semver
diff --git a/src/end.js b/src/end.js
index 2d9c82a..0319a0f 100644
--- a/src/end.js
+++ b/src/end.js
@@ -1 +1 @@
-})()
\ No newline at end of file
+})();
diff --git a/src/package.js b/src/package.js
index 414a18f..27fa73b 100644
--- a/src/package.js
+++ b/src/package.js
@@ -1,7 +1,7 @@
-require("../science");
+require("../science.v1");
 
 require("util").puts(JSON.stringify({
-  "name": "science.js",
+  "name": "science",
   "version": science.version,
   "description": "Scientific and statistical computing in JavaScript.",
   "keywords": ["science", "statistics", "mathematics"],
@@ -9,7 +9,7 @@ require("util").puts(JSON.stringify({
   "author": {"name": "Jason Davies", "url": "http://www.jasondavies.com/"},
   "repository": {"type": "git", "url": "http://github.com/jasondavies/science.js.git"},
   "devDependencies": {
-    "uglify-js": "1.2.2",
+    "uglify-js": "1.2.6",
     "vows": "0.6.0"
   }
 }, null, 2));
diff --git a/test/core/ascending-test.js b/test/core/ascending-test.js
index 99eab9a..85eabe1 100644
--- a/test/core/ascending-test.js
+++ b/test/core/ascending-test.js
@@ -1,4 +1,4 @@
-require("../../science");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/core/expm1-test.js b/test/core/expm1-test.js
index 07affb3..e717f41 100644
--- a/test/core/expm1-test.js
+++ b/test/core/expm1-test.js
@@ -1,4 +1,4 @@
-require("../../science");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/core/hypot-test.js b/test/core/hypot-test.js
index 8cf5000..07a733a 100644
--- a/test/core/hypot-test.js
+++ b/test/core/hypot-test.js
@@ -1,4 +1,4 @@
-require("../../science");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/core/quadratic-test.js b/test/core/quadratic-test.js
index aec6606..12927f2 100644
--- a/test/core/quadratic-test.js
+++ b/test/core/quadratic-test.js
@@ -1,4 +1,4 @@
-require("../../science");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/core/zeroes-test.js b/test/core/zeroes-test.js
index 7b6ef6b..d86e9f4 100644
--- a/test/core/zeroes-test.js
+++ b/test/core/zeroes-test.js
@@ -1,4 +1,4 @@
-require("../../science");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/lin/decompose-test.js b/test/lin/decompose-test.js
index cd8f965..3409c8a 100644
--- a/test/lin/decompose-test.js
+++ b/test/lin/decompose-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.lin");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/lin/tridag-test.js b/test/lin/tridag-test.js
index a2e43d2..d58b1e6 100644
--- a/test/lin/tridag-test.js
+++ b/test/lin/tridag-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.lin");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/bandwidth-test.js b/test/stats/bandwidth-test.js
index ebc450a..001e904 100644
--- a/test/stats/bandwidth-test.js
+++ b/test/stats/bandwidth-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/distance-test.js b/test/stats/distance-test.js
index bcfaa2d..d5342d4 100644
--- a/test/stats/distance-test.js
+++ b/test/stats/distance-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/distribution/gaussian-test.js b/test/stats/distribution/gaussian-test.js
index 96ef965..ae3b4b5 100644
--- a/test/stats/distribution/gaussian-test.js
+++ b/test/stats/distribution/gaussian-test.js
@@ -1,5 +1,5 @@
-require("../../../science");
-require("../../../science.stats");
+require("../../../science.v1");
+require("../../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/erf-test.js b/test/stats/erf-test.js
index a1fd583..fb74df8 100644
--- a/test/stats/erf-test.js
+++ b/test/stats/erf-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/hcluster-test.js b/test/stats/hcluster-test.js
index 8a24edb..0466971 100644
--- a/test/stats/hcluster-test.js
+++ b/test/stats/hcluster-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/iqr-test.js b/test/stats/iqr-test.js
index c6bdc61..b008919 100644
--- a/test/stats/iqr-test.js
+++ b/test/stats/iqr-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/kmeans-test.js b/test/stats/kmeans-test.js
index b608c93..3feff33 100644
--- a/test/stats/kmeans-test.js
+++ b/test/stats/kmeans-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/loess-test.js b/test/stats/loess-test.js
index d8355e2..aaec7af 100644
--- a/test/stats/loess-test.js
+++ b/test/stats/loess-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/mean-test.js b/test/stats/mean-test.js
index 33c9e20..33be3d5 100644
--- a/test/stats/mean-test.js
+++ b/test/stats/mean-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/median-test.js b/test/stats/median-test.js
index 9b3e361..bbe28a8 100644
--- a/test/stats/median-test.js
+++ b/test/stats/median-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/mode-test.js b/test/stats/mode-test.js
index 8e9f661..36c7ed9 100644
--- a/test/stats/mode-test.js
+++ b/test/stats/mode-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/phi-test.js b/test/stats/phi-test.js
index f029436..72342a2 100644
--- a/test/stats/phi-test.js
+++ b/test/stats/phi-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");
diff --git a/test/stats/variance-test.js b/test/stats/variance-test.js
index 88adaea..2f366d3 100644
--- a/test/stats/variance-test.js
+++ b/test/stats/variance-test.js
@@ -1,5 +1,5 @@
-require("../../science");
-require("../../science.stats");
+require("../../science.v1");
+require("../../science.v1");
 
 var vows = require("vows"),
     assert = require("assert");

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/science.js.git


