[Pkg-javascript-commits] [libjs-webrtc-adapter] 01/02: Import Upstream version 4.1.0+dfsg1

Johannes Schauer josch at moszumanska.debian.org
Wed Jul 5 19:05:49 UTC 2017


This is an automated email from the git hooks/post-receive script.

josch pushed a commit to branch master
in repository libjs-webrtc-adapter.

commit 5b4da5d4571062e7d046fe5faaa6f89fcf56f9bc
Author: Johannes Schauer <josch at debian.org>
Date:   Wed Jul 5 21:05:08 2017 +0200

    Import Upstream version 4.1.0+dfsg1
---
 .eslintrc                             |   61 ++
 .gitignore                            |    9 +
 .npmignore                            |   10 +
 .travis.yml                           |   52 ++
 CONTRIBUTING.md                       |   15 +
 Gruntfile.js                          |   82 ++
 ISSUE_TEMPLATE.md                     |   13 +
 LICENSE.md                            |   29 +
 PULL_REQUEST_TEMPLATE.md              |    4 +
 README-w3c-tests.md                   |   14 +
 README.md                             |   67 ++
 bower.json                            |   31 +
 package.json                          |   58 ++
 src/js/adapter_core.js                |   13 +
 src/js/adapter_factory.js             |  121 +++
 src/js/chrome/chrome_shim.js          |  506 ++++++++++++
 src/js/chrome/getusermedia.js         |  235 ++++++
 src/js/edge/edge_shim.js              |   80 ++
 src/js/edge/getusermedia.js           |   34 +
 src/js/edge/rtcpeerconnection_shim.js | 1389 ++++++++++++++++++++++++++++++++
 src/js/firefox/firefox_shim.js        |  198 +++++
 src/js/firefox/getusermedia.js        |  209 +++++
 src/js/safari/safari_shim.js          |  251 ++++++
 src/js/utils.js                       |  197 +++++
 test/.eslintrc                        |    7 +
 test/README.md                        |   77 ++
 test/e2e/browserdetails.js            |   29 +
 test/e2e/connection.js                |  288 +++++++
 test/e2e/getusermedia.js              |   42 +
 test/e2e/mediastream.js               |   15 +
 test/e2e/ontrack.js                   |   84 ++
 test/e2e/rtcicecandidate.js           |   15 +
 test/e2e/rtcpeerconnection.js         |   28 +
 test/e2e/rtcsessiondescription.js     |   15 +
 test/e2e/srcobject.js                 |   48 ++
 test/getusermedia-mocha.js            |   57 ++
 test/karma.conf.js                    |   79 ++
 test/run-tests.js                     |   71 ++
 test/selenium-lib.js                  |  154 ++++
 test/test.js                          | 1343 +++++++++++++++++++++++++++++++
 test/testpage.html                    |   19 +
 test/unit/.eslintrc                   |    7 +
 test/unit/adapter_factory.js          |   40 +
 test/unit/chrome.js                   |   28 +
 test/unit/detectBrowser.js            |   74 ++
 test/unit/edge.js                     | 1400 +++++++++++++++++++++++++++++++++
 test/unit/extractVersion.js           |  167 ++++
 test/unit/firefox.js                  |   37 +
 test/unit/getusermedia-constraints.js |  216 +++++
 test/unit/logSuppression.js           |   44 ++
 test/unit/safari.js                   |  188 +++++
 51 files changed, 8250 insertions(+)

diff --git a/.eslintrc b/.eslintrc
new file mode 100644
index 0000000..d9e3658
--- /dev/null
+++ b/.eslintrc
@@ -0,0 +1,61 @@
+{
+  "rules": {
+    "array-bracket-spacing": 2,
+    "block-spacing": [2, "never"],
+    "brace-style": [2, "1tbs", {"allowSingleLine": false}],
+    "camelcase": [2, {"properties": "always"}],
+    "curly": 2,
+    "default-case": 2,
+    "dot-notation": 2,
+    "eqeqeq": 2,
+    "indent": [
+        2,
+        2,
+        {"SwitchCase": 1}
+    ],
+    "key-spacing": [2, {"beforeColon": false, "afterColon": true}],
+    "keyword-spacing": 2,
+    "max-len": [2, 80, 2, {"ignoreUrls": true}],
+    "new-cap": [2, {"newIsCapExceptions": [
+        "webkitRTCPeerConnection",
+        "mozRTCPeerConnection"
+    ]}],
+    "no-console": 0,
+    "no-else-return": 2,
+    "no-eval": 2,
+    "no-multi-spaces": 2,
+    "no-multiple-empty-lines": [2, {"max": 2}],
+    "no-shadow": 2,
+    "no-trailing-spaces": 2,
+    "no-unused-expressions": 2,
+    "no-unused-vars": [2, {"args": "none"}],
+    "object-curly-spacing": [2, "never"],
+    "padded-blocks": [2, "never"],
+    "quotes": [
+        2,
+        "single"
+    ],
+    "semi": [
+        2,
+        "always"
+    ],
+    "space-before-blocks": 2,
+    "space-before-function-paren": [2, "never"],
+    "space-unary-ops": 2,
+    "space-infix-ops": 2,
+    "spaced-comment": 2,
+    "valid-typeof": 2
+  },
+  "env": {
+      "browser": true,
+      "node": true
+  },
+  "extends": ["eslint:recommended", "webrtc"],
+  "globals": {
+    "module": true,
+    "require": true,
+    "process": true,
+    "Promise": true,
+    "Map": true
+  }
+}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3cbe424
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,9 @@
+browsers/
+firefox-*.tar.bz2
+.DS_Store
+node_modules/
+out/
+validation-report.json
+validation-status.json
+npm-debug.log
+*~
diff --git a/.npmignore b/.npmignore
new file mode 100644
index 0000000..7f543ca
--- /dev/null
+++ b/.npmignore
@@ -0,0 +1,10 @@
+browsers/
+firefox-*.tar.bz2
+.DS_Store
+node_modules/
+validation-report.json
+validation-status.json
+npm-debug.log
+*~
+release/
+
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..d3f7bfd
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,52 @@
+dist: trusty
+language: node_js
+node_js:
+- 6
+
+addons:
+  apt:
+    packages:
+      - pulseaudio
+
+env:
+  global:
+    - DISPLAY=:99.0
+
+  matrix:
+    - BROWSER=chrome  BVER=stable
+    - BROWSER=chrome  BVER=stable CHROMEEXPERIMENT=false
+    - BROWSER=chrome  BVER=beta
+    - BROWSER=chrome  BVER=beta CHROMEEXPERIMENT=false
+    - BROWSER=chrome  BVER=unstable
+    - BROWSER=firefox BVER=stable
+    - BROWSER=firefox BVER=beta
+    - BROWSER=firefox BVER=nightly
+    - BROWSER=firefox BVER=esr
+
+matrix:
+  fast_finish: true
+
+  allow_failures:
+    - env: BROWSER=chrome  BVER=unstable
+    - env: BROWSER=firefox BVER=nightly
+
+before_script:
+  - ./node_modules/travis-multirunner/setup.sh
+  - export CHROME_BIN=browsers/bin/chrome-${BVER}
+  - export FIREFOX_BIN=browsers/bin/firefox-${BVER}
+  - sh -e /etc/init.d/xvfb start
+  - pulseaudio --start
+
+script:
+  - node_modules/.bin/grunt
+  - npm test
+
+after_failure:
+  - for file in *.log; do echo $file; echo "======================"; cat $file; done || true
+
+notifications:
+  email:
+    recipients:
+      forward-webrtc-github@webrtc.org
+    on_success: change
+    on_failure: always
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..c86d36e
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,15 @@
+WebRTC welcomes patches/pulls for features and bug fixes.
+
+For contributors external to Google, follow the instructions given in the [Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual).
+
+In all cases, contributors must sign a contributor license agreement before a contribution can be accepted. Please complete the agreement for an [individual](https://developers.google.com/open-source/cla/individual) or a [corporation](https://developers.google.com/open-source/cla/corporate) as appropriate.
+
+If you plan to add a significant component or large chunk of code, we recommend you bring this up on the [webrtc-discuss group](https://groups.google.com/forum/#!forum/discuss-webrtc) for a design discussion before writing code.
+
+If appropriate, write a unit test which demonstrates that your code functions as expected. Tests are the best way to ensure that future contributors do not break your code accidentally.
+
+To request a change or addition, you must [submit a pull request](https://help.github.com/categories/collaborating/).
+
+WebRTC developers monitor outstanding pull requests. They may request changes to the pull request before accepting. They will also verify that a CLA has been signed.
+
+The [Developer's Guide](https://bit.ly/webrtcdevguide) for this repo has more detailed information about code style, structure and validation.
diff --git a/Gruntfile.js b/Gruntfile.js
new file mode 100644
index 0000000..5b3c4e5
--- /dev/null
+++ b/Gruntfile.js
@@ -0,0 +1,82 @@
+'use strict';
+
+module.exports = function(grunt) {
+  grunt.initConfig({
+    pkg: grunt.file.readJSON('package.json'),
+    browserify: {
+      adapterGlobalObject: {
+        src: ['./src/js/adapter_core.js'],
+        dest: './out/adapter.js',
+        options: {
+          browserifyOptions: {
+            // Exposes shim methods in a global object to the browser.
+            // The tests require this.
+            standalone: 'adapter'
+          }
+        }
+      },
+      // Use this if you do not want adapter to expose anything to the global
+      // scope.
+      adapterAndNoGlobalObject: {
+        src: ['./src/js/adapter_core.js'],
+        dest: './out/adapter_no_global.js'
+      },
+      // Use this if you do not want Microsoft Edge shim to be included.
+      adapterNoEdge: {
+        src: ['./src/js/adapter_core.js'],
+        dest: './out/adapter_no_edge.js',
+        options: {
+          // These files will be skipped.
+          ignore: [
+            './src/js/edge/edge_shim.js'
+          ],
+          browserifyOptions: {
+            // Exposes the shim in a global object to the browser.
+            standalone: 'adapter'
+          }
+        }
+      },
+      // Use this if you do not want Microsoft Edge shim to be included and
+      // do not want adapter to expose anything to the global scope.
+      adapterNoEdgeAndNoGlobalObject: {
+        src: ['./src/js/adapter_core.js'],
+        dest: './out/adapter_no_edge_no_global.js',
+        options: {
+          ignore: [
+            './src/js/edge/edge_shim.js'
+          ]
+        }
+      }
+    },
+    githooks: {
+      all: {
+        'pre-commit': 'lint'
+      }
+    },
+    eslint: {
+      options: {
+        configFile: '.eslintrc'
+      },
+      target: ['src/**/*.js', 'test/*.js', 'test/unit/*.js']
+    },
+    copy: {
+      build: {
+        dest: 'release/',
+        cwd: 'out',
+        src: '**',
+        nonull: true,
+        expand: true
+      }
+    },
+  });
+
+  grunt.loadNpmTasks('grunt-githooks');
+  grunt.loadNpmTasks('grunt-eslint');
+  grunt.loadNpmTasks('grunt-browserify');
+  grunt.loadNpmTasks('grunt-contrib-copy');
+
+  grunt.registerTask('default', ['eslint', 'browserify']);
+  grunt.registerTask('lint', ['eslint']);
+  grunt.registerTask('build', ['browserify']);
+  grunt.registerTask('copyForPublish', ['copy']);
+};
diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..bb734d6
--- /dev/null
+++ b/ISSUE_TEMPLATE.md
@@ -0,0 +1,13 @@
+**Browsers and versions affected**
+
+
+**Description**
+
+
+**Steps to reproduce**
+
+
+**Expected results**
+
+
+**Actual results**
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000..c768cfb
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,29 @@
+Copyright (c) 2014, The WebRTC project authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+
+  * Neither the name of Google nor the names of its contributors may
+    be used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..13f03c0
--- /dev/null
+++ b/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,4 @@
+**Description**
+
+
+**Purpose**
diff --git a/README-w3c-tests.md b/README-w3c-tests.md
new file mode 100644
index 0000000..308bb36
--- /dev/null
+++ b/README-w3c-tests.md
@@ -0,0 +1,14 @@
+How to use adapter with W3C tests
+---------------------------------
+
+If you want to test that the adapter works with the W3C tests, execute
+the following (where TESTDIR is the root of the [web-platform-tests](https://github.com/w3c/web-platform-tests) repo):
+
+- (cd $TESTDIR; git checkout master; git checkout -b some-unused-branch-name)
+- cat adapter.js > $TESTDIR/common/vendor-prefix.js
+- Run the tests according to $TESTDIR/README.md
+
+WebRTC-specific tests are found in "mediacapture-streams" and "webrtc".
+With the adapter installed, the tests should run *without* vendor prefixes.
+
+Note: Not all of the W3C tests are updated to be spec-conformant.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3804b4a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,67 @@
+[![Build Status](https://travis-ci.org/webrtc/adapter.svg)](https://travis-ci.org/webrtc/adapter)
+
+# WebRTC adapter #
+adapter.js is a shim to insulate apps from spec changes and prefix differences. In fact, the standards and protocols used for WebRTC implementations are highly stable, and there are only a few prefixed names. For full interop information, see [webrtc.org/web-apis/interop](https://www.webrtc.org/web-apis/interop).
+
+## Install ##
+
+#### NPM
+```bash
+npm install webrtc-adapter
+```
+
+#### Bower
+```bash
+bower install webrtc-adapter
+```
+
+## Usage ##
+##### NPM
+Copy the file to the desired location in your src tree, or use a minify/vulcanize tool (node_modules is usually not published with the code).
+See the [webrtc/samples repo](https://github.com/webrtc/samples/blob/gh-pages/package.json) as an example of how you can do this.
+
+#### Prebuilt releases
+##### Web
+Prebuilt, ready-to-use files can be downloaded or linked directly from the [gh-pages branch](https://github.com/webrtc/adapter/tree/gh-pages).
+The latest version can be found at https://webrtc.github.io/adapter/adapter-latest.js.
+Specific versions can be found at https://webrtc.github.io/adapter/adapter-N.N.N.js, e.g. https://webrtc.github.io/adapter/adapter-1.0.2.js.
+
+##### Bower
+You will find `adapter.js` in `bower_components/webrtc-adapter/`.
+
+##### NPM
+In the node_modules/webrtc-adapter/out/ folder you will find 4 files:
+* `adapter.js` - includes all the shims and is visible in the browser under the global `adapter` object (window.adapter).
+* `adapter_no_edge.js` - same as above but does not include the Microsoft Edge (ORTC) shim.
+* `adapter_no_edge_no_global.js` - same as above but is not exposed/visible in the browser (you cannot call/interact with the shims in the browser).
+* `adapter_no_global.js` - same as `adapter.js` but is not exposed/visible in the browser (you cannot call/interact with the shims in the browser).
+
+Include the file that suits your need in your project.
+
+## Development ##
+Detailed information on developing in the [webrtc](https://github.com/webrtc) github repo can be found in the [WebRTC GitHub repo developer's guide](https://docs.google.com/document/d/1tn1t6LW2ffzGuYTK3366w1fhTkkzsSvHsBnOHoDfRzY/edit?pli=1#heading=h.e3366rrgmkdk).
+
+Head over to [test/README.md](https://github.com/webrtc/samples/blob/gh-pages/test/README.md) and get started developing.
+
+## Publish a new version ##
+* Go to the adapter repository root directory
+* Make sure your repository is clean, i.e. no untracked files etc. Also check that you are on the master branch and have pulled the latest changes.
+* Depending on the impact of the release, either use `patch`, `minor` or `major` in place of `<version>`. Run `npm version <version> -m 'bump to %s'` and type in your password lots of times (setting up credential caching is probably a good idea).
+* Create the PR and, once CI is green, merge it in the GitHub web UI.
+* Go to the releases tab in the GitHub web UI and edit the tag.
+* Add a summary of the recent commits in the tag summary and a link to the diff between the previous and current version in the description, [example](https://github.com/webrtc/adapter/releases/tag/v3.4.1).
+* Go back to your checkout and run `git pull`
+* Run `npm publish` (you need access to the [webrtc-adapter npmjs package](https://www.npmjs.com/package/webrtc-adapter))
+* Done! There should now be a new release published to NPM and the gh-pages branch.
+
+Note: Currently only tested on Linux, not sure about Mac but will definitely not work on Windows.
+
+### Publish a hotfix patch version
+In some cases it may be necessary to do a patch version while there are significant changes on the master branch.
+To make a patch release,
+* check out the latest git tag using `git checkout tags/vMajor.minor.patch`.
+* check out a new branch, using a name such as patchrelease-major-minor-patch.
+* cherry-pick the fixes using `git cherry-pick some-commit-hash`.
+* run `npm version patch`. This will create a new patch version and publish it on GitHub.
+* check out the branch created earlier and publish the new version using `npm publish`.
+* the branch can now safely be deleted. It is not necessary to merge it into the main branch since it only contains cherry-picked commits.
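To illustrate the NPM usage described in README.md above, here is a minimal consumption sketch; the `require` name comes from package.json, while the `<video>` element and constraint values are hypothetical:

```javascript
// Hypothetical app code: load the shims once, then use the spec APIs.
var adapter = require('webrtc-adapter');

console.log('adapter is shimming', adapter.browserDetails.browser,
    'version', adapter.browserDetails.version);

navigator.mediaDevices.getUserMedia({audio: true, video: true})
  .then(function(stream) {
    // srcObject is shimmed where the browser lacks it.
    document.querySelector('video').srcObject = stream;
  })
  .catch(function(e) {
    console.error('getUserMedia failed:', e.name);
  });
```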
diff --git a/bower.json b/bower.json
new file mode 100644
index 0000000..0722311
--- /dev/null
+++ b/bower.json
@@ -0,0 +1,31 @@
+{
+  "name": "webrtc-adapter",
+  "description": "A shim to insulate apps from WebRTC spec changes and browser prefix differences",
+  "license": "BSD-3-Clause",
+  "main": "./release/adapter.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/webrtc/adapter.git"
+  },
+  "authors": [
+    "The WebRTC project authors (https://www.webrtc.org/)"
+  ],
+  "moduleType": [
+    "node"
+  ],
+  "ignore": [
+    "test/*"
+  ],
+  "keywords": [
+    "WebRTC",
+    "PeerConnection",
+    "RTCPeerConnection",
+    "getUserMedia",
+    "Chrome",
+    "Chromium",
+    "Firefox",
+    "Edge",
+    "Adapter",
+    "Shim"
+  ]
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..70c3248
--- /dev/null
+++ b/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "webrtc-adapter",
+  "version": "4.1.0",
+  "description": "A shim to insulate apps from WebRTC spec changes and browser prefix differences",
+  "license": "BSD-3-Clause",
+  "main": "./src/js/adapter_core.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/webrtc/adapter.git"
+  },
+  "authors": [
+    "The WebRTC project authors (https://www.webrtc.org/)"
+  ],
+  "scripts": {
+    "preversion": "git stash && npm install && npm test | faucet && git checkout -B bumpVersion && grunt build && grunt copyForPublish && git add package.json release/* && git commit -m 'Add adapter artifacts' --allow-empty",
+    "version": "",
+    "postversion": "export GITTAG=\"echo $(git describe --abbrev=0 --tags | sed 's/^v//')\" && git push --force --set-upstream origin bumpVersion --follow-tags && git checkout gh-pages && git pull && cp out/adapter.js adapter.js && cp adapter.js adapter-`$GITTAG`.js && rm adapter-latest.js && ln -s adapter-`$GITTAG`.js adapter-latest.js && mkdir -p adapter-`$GITTAG`-variants && cp out/adapter.js adapter-`$GITTAG`-variants/ && cp out/adapter_*.js adapter-`$GITTAG`-variants/ && git add ada [...]
+    "prepublish": "grunt build",
+    "test": "grunt && mocha test/unit && karma start test/karma.conf.js && node test/run-tests.js"
+  },
+  "dependencies": {
+    "sdp": "^2.1.0"
+  },
+  "engines": {
+    "npm": ">=3.10.0",
+    "node": ">=6.0.0"
+  },
+  "devDependencies": {
+    "brfs": "^1.4.3",
+    "chai": "^3.5.0",
+    "chromedriver": "^2.29.0",
+    "eslint-config-webrtc": "^1.0.0",
+    "faucet": "0.0.1",
+    "geckodriver": "1.4.0",
+    "grunt": "^0.4.5",
+    "grunt-browserify": "^4.0.1",
+    "grunt-cli": ">=0.1.9",
+    "grunt-contrib-clean": "^1.0.0",
+    "grunt-contrib-copy": "^1.0.0",
+    "grunt-eslint": "^19.0.0",
+    "grunt-githooks": "^0.3.1",
+    "karma": "^1.7.0",
+    "karma-browserify": "^5.1.1",
+    "karma-chai": "^0.1.0",
+    "karma-chrome-launcher": "^2.2.0",
+    "karma-edge-launcher": "^0.4.1",
+    "karma-firefox-launcher": "^1.0.1",
+    "karma-mocha": "^1.3.0",
+    "karma-mocha-reporter": "^2.2.3",
+    "karma-safari-launcher": "^1.0.0",
+    "mocha": "^3.2.0",
+    "selenium-webdriver": "3.3.0",
+    "sinon": "^2.2.0",
+    "sinon-chai": "^2.10.0",
+    "tape": "^4.0.0",
+    "travis-multirunner": "^3.0.1"
+  }
+}
diff --git a/src/js/adapter_core.js b/src/js/adapter_core.js
new file mode 100644
index 0000000..7ae9db7
--- /dev/null
+++ b/src/js/adapter_core.js
@@ -0,0 +1,13 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+
+'use strict';
+
+var adapterFactory = require('./adapter_factory.js');
+module.exports = adapterFactory({window: global.window});
diff --git a/src/js/adapter_factory.js b/src/js/adapter_factory.js
new file mode 100644
index 0000000..253dde9
--- /dev/null
+++ b/src/js/adapter_factory.js
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+
+'use strict';
+
+// Shimming starts here.
+module.exports = function(dependencies, opts) {
+  var window = dependencies && dependencies.window;
+
+  var options = Object.assign({
+    shimChrome: true,
+    shimFirefox: true,
+    shimEdge: true,
+    shimSafari: true,
+  }, opts);
+
+  // Utils.
+  var utils = require('./utils');
+  var logging = utils.log;
+  var browserDetails = utils.detectBrowser(window);
+
+  // Export to the adapter global object visible in the browser.
+  var adapter = {
+    browserDetails: browserDetails,
+    extractVersion: utils.extractVersion,
+    disableLog: utils.disableLog,
+    disableWarnings: utils.disableWarnings
+  };
+
+  // Uncomment the line below if you want logging to occur, including logging
+  // for the switch statement below. Can also be turned on in the browser via
+  // adapter.disableLog(false), but then logging from the switch statement below
+  // will not appear.
+  // require('./utils').disableLog(false);
+
+  // Browser shims.
+  var chromeShim = require('./chrome/chrome_shim') || null;
+  var edgeShim = require('./edge/edge_shim') || null;
+  var firefoxShim = require('./firefox/firefox_shim') || null;
+  var safariShim = require('./safari/safari_shim') || null;
+
+  // Shim browser if found.
+  switch (browserDetails.browser) {
+    case 'chrome':
+      if (!chromeShim || !chromeShim.shimPeerConnection ||
+          !options.shimChrome) {
+        logging('Chrome shim is not included in this adapter release.');
+        return adapter;
+      }
+      logging('adapter.js shimming chrome.');
+      // Export to the adapter global object visible in the browser.
+      adapter.browserShim = chromeShim;
+
+      chromeShim.shimGetUserMedia(window);
+      chromeShim.shimMediaStream(window);
+      utils.shimCreateObjectURL(window);
+      chromeShim.shimSourceObject(window);
+      chromeShim.shimPeerConnection(window);
+      chromeShim.shimOnTrack(window);
+      chromeShim.shimAddTrack(window);
+      chromeShim.shimGetSendersWithDtmf(window);
+      break;
+    case 'firefox':
+      if (!firefoxShim || !firefoxShim.shimPeerConnection ||
+          !options.shimFirefox) {
+        logging('Firefox shim is not included in this adapter release.');
+        return adapter;
+      }
+      logging('adapter.js shimming firefox.');
+      // Export to the adapter global object visible in the browser.
+      adapter.browserShim = firefoxShim;
+
+      firefoxShim.shimGetUserMedia(window);
+      utils.shimCreateObjectURL(window);
+      firefoxShim.shimSourceObject(window);
+      firefoxShim.shimPeerConnection(window);
+      firefoxShim.shimOnTrack(window);
+      break;
+    case 'edge':
+      if (!edgeShim || !edgeShim.shimPeerConnection || !options.shimEdge) {
+        logging('MS edge shim is not included in this adapter release.');
+        return adapter;
+      }
+      logging('adapter.js shimming edge.');
+      // Export to the adapter global object visible in the browser.
+      adapter.browserShim = edgeShim;
+
+      edgeShim.shimGetUserMedia(window);
+      utils.shimCreateObjectURL(window);
+      edgeShim.shimPeerConnection(window);
+      edgeShim.shimReplaceTrack(window);
+      break;
+    case 'safari':
+      if (!safariShim || !options.shimSafari) {
+        logging('Safari shim is not included in this adapter release.');
+        return adapter;
+      }
+      logging('adapter.js shimming safari.');
+      // Export to the adapter global object visible in the browser.
+      adapter.browserShim = safariShim;
+      // shim window.URL.createObjectURL Safari (technical preview)
+      utils.shimCreateObjectURL(window);
+      safariShim.shimRTCIceServerUrls(window);
+      safariShim.shimCallbacksAPI(window);
+      safariShim.shimLocalStreamsAPI(window);
+      safariShim.shimRemoteStreamsAPI(window);
+      safariShim.shimGetUserMedia(window);
+      break;
+    default:
+      logging('Unsupported browser!');
+      break;
+  }
+
+  return adapter;
+};
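adapter_core.js above calls this factory with `{window: global.window}`; the sketch below calls it directly with the per-browser options the factory accepts (the relative require path is an assumption about where the caller lives):

```javascript
// Hypothetical direct use of the factory, e.g. from a custom bundle entry.
var adapterFactory = require('./src/js/adapter_factory.js');

// Pass the real window plus options; any of the shims can be switched off.
var adapter = adapterFactory({window: window}, {
  shimEdge: false,  // leave Edge/ORTC untouched in this hypothetical bundle
  shimChrome: true,
  shimFirefox: true,
  shimSafari: true
});

console.log('detected browser:', adapter.browserDetails.browser);
```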
diff --git a/src/js/chrome/chrome_shim.js b/src/js/chrome/chrome_shim.js
new file mode 100644
index 0000000..2b2dd69
--- /dev/null
+++ b/src/js/chrome/chrome_shim.js
@@ -0,0 +1,506 @@
+
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+var utils = require('../utils.js');
+var logging = utils.log;
+
+var chromeShim = {
+  shimMediaStream: function(window) {
+    window.MediaStream = window.MediaStream || window.webkitMediaStream;
+  },
+
+  shimOnTrack: function(window) {
+    if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
+        window.RTCPeerConnection.prototype)) {
+      Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
+        get: function() {
+          return this._ontrack;
+        },
+        set: function(f) {
+          var self = this;
+          if (this._ontrack) {
+            this.removeEventListener('track', this._ontrack);
+            this.removeEventListener('addstream', this._ontrackpoly);
+          }
+          this.addEventListener('track', this._ontrack = f);
+          this.addEventListener('addstream', this._ontrackpoly = function(e) {
+            // onaddstream does not fire when a track is added to an existing
+            // stream. But stream.onaddtrack is implemented so we use that.
+            e.stream.addEventListener('addtrack', function(te) {
+              var receiver;
+              if (window.RTCPeerConnection.prototype.getReceivers) {
+                receiver = self.getReceivers().find(function(r) {
+                  return r.track.id === te.track.id;
+                });
+              } else {
+                receiver = {track: te.track};
+              }
+
+              var event = new Event('track');
+              event.track = te.track;
+              event.receiver = receiver;
+              event.streams = [e.stream];
+              self.dispatchEvent(event);
+            });
+            e.stream.getTracks().forEach(function(track) {
+              var receiver;
+              if (window.RTCPeerConnection.prototype.getReceivers) {
+                receiver = self.getReceivers().find(function(r) {
+                  return r.track.id === track.id;
+                });
+              } else {
+                receiver = {track: track};
+              }
+              var event = new Event('track');
+              event.track = track;
+              event.receiver = receiver;
+              event.streams = [e.stream];
+              this.dispatchEvent(event);
+            }.bind(this));
+          }.bind(this));
+        }
+      });
+    }
+  },
+
+  shimGetSendersWithDtmf: function(window) {
+    if (typeof window === 'object' && window.RTCPeerConnection &&
+        !('getSenders' in window.RTCPeerConnection.prototype) &&
+        'createDTMFSender' in window.RTCPeerConnection.prototype) {
+      var shimSenderWithDtmf = function(pc, track) {
+        return {
+          track: track,
+          get dtmf() {
+            if (this._dtmf === undefined) {
+              if (track.kind === 'audio') {
+                this._dtmf = pc.createDTMFSender(track);
+              } else {
+                this._dtmf = null;
+              }
+            }
+            return this._dtmf;
+          }
+        };
+      };
+
+      // shim addTrack when getSenders is not available.
+      if (!window.RTCPeerConnection.prototype.getSenders) {
+        window.RTCPeerConnection.prototype.getSenders = function() {
+          return this._senders || [];
+        };
+        var origAddTrack = window.RTCPeerConnection.prototype.addTrack;
+        window.RTCPeerConnection.prototype.addTrack = function(track, stream) {
+          var pc = this;
+          var sender = origAddTrack.apply(pc, arguments);
+          if (!sender) {
+            sender = shimSenderWithDtmf(pc, track);
+            pc._senders.push(sender);
+          }
+          return sender;
+        };
+      }
+      var origAddStream = window.RTCPeerConnection.prototype.addStream;
+      window.RTCPeerConnection.prototype.addStream = function(stream) {
+        var pc = this;
+        pc._senders = pc._senders || [];
+        origAddStream.apply(pc, [stream]);
+        stream.getTracks().forEach(function(track) {
+          pc._senders.push(shimSenderWithDtmf(pc, track));
+        });
+      };
+
+      var origRemoveStream = window.RTCPeerConnection.prototype.removeStream;
+      window.RTCPeerConnection.prototype.removeStream = function(stream) {
+        var pc = this;
+        pc._senders = pc._senders || [];
+        origRemoveStream.apply(pc, [(pc._streams[stream.id] || stream)]);
+
+        stream.getTracks().forEach(function(track) {
+          var sender = pc._senders.find(function(s) {
+            return s.track === track;
+          });
+          if (sender) {
+            pc._senders.splice(pc._senders.indexOf(sender), 1); // remove sender
+          }
+        });
+      };
+    } else if (typeof window === 'object' && window.RTCPeerConnection &&
+               'getSenders' in window.RTCPeerConnection.prototype &&
+               'createDTMFSender' in window.RTCPeerConnection.prototype &&
+               window.RTCRtpSender &&
+               !('dtmf' in window.RTCRtpSender.prototype)) {
+      var origGetSenders = window.RTCPeerConnection.prototype.getSenders;
+      window.RTCPeerConnection.prototype.getSenders = function() {
+        var pc = this;
+        var senders = origGetSenders.apply(pc, []);
+        senders.forEach(function(sender) {
+          sender._pc = pc;
+        });
+        return senders;
+      };
+
+      Object.defineProperty(window.RTCRtpSender.prototype, 'dtmf', {
+        get: function() {
+          if (this._dtmf === undefined) {
+            if (this.track.kind === 'audio') {
+              this._dtmf = this._pc.createDTMFSender(this.track);
+            } else {
+              this._dtmf = null;
+            }
+          }
+          return this._dtmf;
+        },
+      });
+    }
+  },
+
+  shimSourceObject: function(window) {
+    var URL = window && window.URL;
+
+    if (typeof window === 'object') {
+      if (window.HTMLMediaElement &&
+        !('srcObject' in window.HTMLMediaElement.prototype)) {
+        // Shim the srcObject property, once, when HTMLMediaElement is found.
+        Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
+          get: function() {
+            return this._srcObject;
+          },
+          set: function(stream) {
+            var self = this;
+            // Use _srcObject as a private property for this shim
+            this._srcObject = stream;
+            if (this.src) {
+              URL.revokeObjectURL(this.src);
+            }
+
+            if (!stream) {
+              this.src = '';
+              return undefined;
+            }
+            this.src = URL.createObjectURL(stream);
+            // We need to recreate the blob url when a track is added or
+            // removed. Doing it manually since we want to avoid a recursion.
+            stream.addEventListener('addtrack', function() {
+              if (self.src) {
+                URL.revokeObjectURL(self.src);
+              }
+              self.src = URL.createObjectURL(stream);
+            });
+            stream.addEventListener('removetrack', function() {
+              if (self.src) {
+                URL.revokeObjectURL(self.src);
+              }
+              self.src = URL.createObjectURL(stream);
+            });
+          }
+        });
+      }
+    }
+  },
+
+  shimAddTrack: function(window) {
+    // shim addTrack (when getSenders is available)
+    if (window.RTCPeerConnection.prototype.addTrack) {
+      return;
+    }
+
+    // also shim pc.getLocalStreams when addTrack is shimmed
+    // to return the original streams.
+    var origGetLocalStreams = window.RTCPeerConnection.prototype
+        .getLocalStreams;
+    window.RTCPeerConnection.prototype.getLocalStreams = function() {
+      var self = this;
+      var nativeStreams = origGetLocalStreams.apply(this);
+      self._reverseStreams = self._reverseStreams || {};
+      return nativeStreams.map(function(stream) {
+        return self._reverseStreams[stream.id];
+      });
+    };
+
+    var origAddStream = window.RTCPeerConnection.prototype.addStream;
+    window.RTCPeerConnection.prototype.addStream = function(stream) {
+      var pc = this;
+      pc._streams = pc._streams || {};
+      pc._reverseStreams = pc._reverseStreams || {};
+
+      // Add identity mapping for consistency with addTrack.
+      // Unless this is being used with a stream from addTrack.
+      if (!pc._reverseStreams[stream.id]) {
+        pc._streams[stream.id] = stream;
+        pc._reverseStreams[stream.id] = stream;
+      }
+      origAddStream.apply(pc, [stream]);
+    };
+
+    var origRemoveStream = window.RTCPeerConnection.prototype.removeStream;
+    window.RTCPeerConnection.prototype.removeStream = function(stream) {
+      var pc = this;
+      pc._streams = pc._streams || {};
+      pc._reverseStreams = pc._reverseStreams || {};
+
+      origRemoveStream.apply(pc, [(pc._streams[stream.id] || stream)]);
+      delete pc._reverseStreams[(pc._streams[stream.id] ?
+          pc._streams[stream.id].id : stream.id)];
+      delete pc._streams[stream.id];
+    };
+
+    window.RTCPeerConnection.prototype.addTrack = function(track, stream) {
+      var pc = this;
+      if (pc.signalingState === 'closed') {
+        throw new DOMException(
+          'The RTCPeerConnection\'s signalingState is \'closed\'.',
+          'InvalidStateError');
+      }
+      var streams = [].slice.call(arguments, 1);
+      if (streams.length !== 1 ||
+          !streams[0].getTracks().find(function(t) {
+            return t === track;
+          })) {
+        // this is not fully correct but all we can manage without
+        // [[associated MediaStreams]] internal slot.
+        throw new DOMException(
+          'The adapter.js addTrack polyfill only supports a single ' +
+          'stream which is associated with the specified track.',
+          'NotSupportedError');
+      }
+
+      var alreadyExists = pc.getSenders().find(function(s) {
+        return s.track === track;
+      });
+      if (alreadyExists) {
+        throw new DOMException('Track already exists.',
+            'InvalidAccessError');
+      }
+
+      pc._streams = pc._streams || {};
+      pc._reverseStreams = pc._reverseStreams || {};
+      var oldStream = pc._streams[stream.id];
+      if (oldStream) {
+        // this is using odd Chrome behaviour, use with caution:
+        // https://bugs.chromium.org/p/webrtc/issues/detail?id=7815
+        oldStream.addTrack(track);
+        pc.dispatchEvent(new Event('negotiationneeded'));
+      } else {
+        var newStream = new window.MediaStream([track]);
+        pc._streams[stream.id] = newStream;
+        pc._reverseStreams[newStream.id] = stream;
+        pc.addStream(newStream);
+      }
+      return pc.getSenders().find(function(s) {
+        return s.track === track;
+      });
+    };
+  },
+
+  shimPeerConnection: function(window) {
+    var browserDetails = utils.detectBrowser(window);
+
+    // The RTCPeerConnection object.
+    if (!window.RTCPeerConnection) {
+      window.RTCPeerConnection = function(pcConfig, pcConstraints) {
+        // Translate iceTransportPolicy to iceTransports,
+        // see https://code.google.com/p/webrtc/issues/detail?id=4869
+        // this was fixed in M56 along with unprefixing RTCPeerConnection.
+        logging('PeerConnection');
+        if (pcConfig && pcConfig.iceTransportPolicy) {
+          pcConfig.iceTransports = pcConfig.iceTransportPolicy;
+        }
+
+        return new window.webkitRTCPeerConnection(pcConfig, pcConstraints);
+      };
+      window.RTCPeerConnection.prototype =
+          window.webkitRTCPeerConnection.prototype;
+      // wrap static methods. Currently just generateCertificate.
+      if (window.webkitRTCPeerConnection.generateCertificate) {
+        Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
+          get: function() {
+            return window.webkitRTCPeerConnection.generateCertificate;
+          }
+        });
+      }
+    } else {
+      // migrate from non-spec RTCIceServer.url to RTCIceServer.urls
+      var OrigPeerConnection = window.RTCPeerConnection;
+      window.RTCPeerConnection = function(pcConfig, pcConstraints) {
+        if (pcConfig && pcConfig.iceServers) {
+          var newIceServers = [];
+          for (var i = 0; i < pcConfig.iceServers.length; i++) {
+            var server = pcConfig.iceServers[i];
+            if (!server.hasOwnProperty('urls') &&
+                server.hasOwnProperty('url')) {
+              console.warn('RTCIceServer.url is deprecated! Use urls instead.');
+              server = JSON.parse(JSON.stringify(server));
+              server.urls = server.url;
+              newIceServers.push(server);
+            } else {
+              newIceServers.push(pcConfig.iceServers[i]);
+            }
+          }
+          pcConfig.iceServers = newIceServers;
+        }
+        return new OrigPeerConnection(pcConfig, pcConstraints);
+      };
+      window.RTCPeerConnection.prototype = OrigPeerConnection.prototype;
+      // wrap static methods. Currently just generateCertificate.
+      Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
+        get: function() {
+          return OrigPeerConnection.generateCertificate;
+        }
+      });
+    }
+
+    var origGetStats = window.RTCPeerConnection.prototype.getStats;
+    window.RTCPeerConnection.prototype.getStats = function(selector,
+        successCallback, errorCallback) {
+      var self = this;
+      var args = arguments;
+
+      // If selector is a function then we are in the old style stats so just
+      // pass back the original getStats format to avoid breaking old users.
+      if (arguments.length > 0 && typeof selector === 'function') {
+        return origGetStats.apply(this, arguments);
+      }
+
+      // When spec-style getStats is supported, return those when called with
+      // either no arguments or the selector argument is null.
+      if (origGetStats.length === 0 && (arguments.length === 0 ||
+          typeof arguments[0] !== 'function')) {
+        return origGetStats.apply(this, []);
+      }
+
+      var fixChromeStats_ = function(response) {
+        var standardReport = {};
+        var reports = response.result();
+        reports.forEach(function(report) {
+          var standardStats = {
+            id: report.id,
+            timestamp: report.timestamp,
+            type: {
+              localcandidate: 'local-candidate',
+              remotecandidate: 'remote-candidate'
+            }[report.type] || report.type
+          };
+          report.names().forEach(function(name) {
+            standardStats[name] = report.stat(name);
+          });
+          standardReport[standardStats.id] = standardStats;
+        });
+
+        return standardReport;
+      };
+
+      // shim getStats with maplike support
+      var makeMapStats = function(stats) {
+        return new Map(Object.keys(stats).map(function(key) {
+          return [key, stats[key]];
+        }));
+      };
+
+      if (arguments.length >= 2) {
+        var successCallbackWrapper_ = function(response) {
+          args[1](makeMapStats(fixChromeStats_(response)));
+        };
+
+        return origGetStats.apply(this, [successCallbackWrapper_,
+          arguments[0]]);
+      }
+
+      // promise-support
+      return new Promise(function(resolve, reject) {
+        origGetStats.apply(self, [
+          function(response) {
+            resolve(makeMapStats(fixChromeStats_(response)));
+          }, reject]);
+      }).then(successCallback, errorCallback);
+    };
+
+    // add promise support -- natively available in Chrome 51
+    if (browserDetails.version < 51) {
+      ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
+          .forEach(function(method) {
+            var nativeMethod = window.RTCPeerConnection.prototype[method];
+            window.RTCPeerConnection.prototype[method] = function() {
+              var args = arguments;
+              var self = this;
+              var promise = new Promise(function(resolve, reject) {
+                nativeMethod.apply(self, [args[0], resolve, reject]);
+              });
+              if (args.length < 2) {
+                return promise;
+              }
+              return promise.then(function() {
+                args[1].apply(null, []);
+              },
+              function(err) {
+                if (args.length >= 3) {
+                  args[2].apply(null, [err]);
+                }
+              });
+            };
+          });
+    }
+
+    // promise support for createOffer and createAnswer. Available (without
+    // bugs) since M52: crbug/619289
+    if (browserDetails.version < 52) {
+      ['createOffer', 'createAnswer'].forEach(function(method) {
+        var nativeMethod = window.RTCPeerConnection.prototype[method];
+        window.RTCPeerConnection.prototype[method] = function() {
+          var self = this;
+          if (arguments.length < 1 || (arguments.length === 1 &&
+              typeof arguments[0] === 'object')) {
+            var opts = arguments.length === 1 ? arguments[0] : undefined;
+            return new Promise(function(resolve, reject) {
+              nativeMethod.apply(self, [resolve, reject, opts]);
+            });
+          }
+          return nativeMethod.apply(this, arguments);
+        };
+      });
+    }
+
+    // shim implicit creation of RTCSessionDescription/RTCIceCandidate
+    ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
+        .forEach(function(method) {
+          var nativeMethod = window.RTCPeerConnection.prototype[method];
+          window.RTCPeerConnection.prototype[method] = function() {
+            arguments[0] = new ((method === 'addIceCandidate') ?
+                window.RTCIceCandidate :
+                window.RTCSessionDescription)(arguments[0]);
+            return nativeMethod.apply(this, arguments);
+          };
+        });
+
+    // support for addIceCandidate(null or undefined)
+    var nativeAddIceCandidate =
+        window.RTCPeerConnection.prototype.addIceCandidate;
+    window.RTCPeerConnection.prototype.addIceCandidate = function() {
+      if (!arguments[0]) {
+        if (arguments[1]) {
+          arguments[1].apply(null);
+        }
+        return Promise.resolve();
+      }
+      return nativeAddIceCandidate.apply(this, arguments);
+    };
+  }
+};
+
+
+// Expose public methods.
+module.exports = {
+  shimMediaStream: chromeShim.shimMediaStream,
+  shimOnTrack: chromeShim.shimOnTrack,
+  shimAddTrack: chromeShim.shimAddTrack,
+  shimGetSendersWithDtmf: chromeShim.shimGetSendersWithDtmf,
+  shimSourceObject: chromeShim.shimSourceObject,
+  shimPeerConnection: chromeShim.shimPeerConnection,
+  shimGetUserMedia: require('./getusermedia')
+};
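A rough sketch of what shimOnTrack and shimAddTrack above provide on older Chrome: the unified `track` event and an `addTrack()` built on top of `addStream()`. The peer connection configuration and constraints below are hypothetical:

```javascript
// Hypothetical application code relying on the Chrome shims above.
var pc = new RTCPeerConnection({iceServers: []});

// 'track' is synthesized from 'addstream'/'addtrack' where it is not native.
pc.addEventListener('track', function(e) {
  console.log('remote', e.track.kind, 'track in stream', e.streams[0].id);
});

navigator.mediaDevices.getUserMedia({audio: true}).then(function(stream) {
  // The addTrack polyfill only supports a single stream that is associated
  // with the given track, as noted in the shim.
  var sender = pc.addTrack(stream.getAudioTracks()[0], stream);
  console.log('added sender for track', sender.track.id);
});
```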
diff --git a/src/js/chrome/getusermedia.js b/src/js/chrome/getusermedia.js
new file mode 100644
index 0000000..ec0b928
--- /dev/null
+++ b/src/js/chrome/getusermedia.js
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+var utils = require('../utils.js');
+var logging = utils.log;
+
+// Expose public methods.
+module.exports = function(window) {
+  var browserDetails = utils.detectBrowser(window);
+  var navigator = window && window.navigator;
+
+  var constraintsToChrome_ = function(c) {
+    if (typeof c !== 'object' || c.mandatory || c.optional) {
+      return c;
+    }
+    var cc = {};
+    Object.keys(c).forEach(function(key) {
+      if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
+        return;
+      }
+      var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
+      if (r.exact !== undefined && typeof r.exact === 'number') {
+        r.min = r.max = r.exact;
+      }
+      var oldname_ = function(prefix, name) {
+        if (prefix) {
+          return prefix + name.charAt(0).toUpperCase() + name.slice(1);
+        }
+        return (name === 'deviceId') ? 'sourceId' : name;
+      };
+      if (r.ideal !== undefined) {
+        cc.optional = cc.optional || [];
+        var oc = {};
+        if (typeof r.ideal === 'number') {
+          oc[oldname_('min', key)] = r.ideal;
+          cc.optional.push(oc);
+          oc = {};
+          oc[oldname_('max', key)] = r.ideal;
+          cc.optional.push(oc);
+        } else {
+          oc[oldname_('', key)] = r.ideal;
+          cc.optional.push(oc);
+        }
+      }
+      if (r.exact !== undefined && typeof r.exact !== 'number') {
+        cc.mandatory = cc.mandatory || {};
+        cc.mandatory[oldname_('', key)] = r.exact;
+      } else {
+        ['min', 'max'].forEach(function(mix) {
+          if (r[mix] !== undefined) {
+            cc.mandatory = cc.mandatory || {};
+            cc.mandatory[oldname_(mix, key)] = r[mix];
+          }
+        });
+      }
+    });
+    if (c.advanced) {
+      cc.optional = (cc.optional || []).concat(c.advanced);
+    }
+    return cc;
+  };
+
+  var shimConstraints_ = function(constraints, func) {
+    constraints = JSON.parse(JSON.stringify(constraints));
+    if (constraints && typeof constraints.audio === 'object') {
+      var remap = function(obj, a, b) {
+        if (a in obj && !(b in obj)) {
+          obj[b] = obj[a];
+          delete obj[a];
+        }
+      };
+      constraints = JSON.parse(JSON.stringify(constraints));
+      remap(constraints.audio, 'autoGainControl', 'googAutoGainControl');
+      remap(constraints.audio, 'noiseSuppression', 'googNoiseSuppression');
+      constraints.audio = constraintsToChrome_(constraints.audio);
+    }
+    if (constraints && typeof constraints.video === 'object') {
+      // Shim facingMode for mobile & surface pro.
+      var face = constraints.video.facingMode;
+      face = face && ((typeof face === 'object') ? face : {ideal: face});
+      var getSupportedFacingModeLies = browserDetails.version < 61;
+
+      if ((face && (face.exact === 'user' || face.exact === 'environment' ||
+                    face.ideal === 'user' || face.ideal === 'environment')) &&
+          !(navigator.mediaDevices.getSupportedConstraints &&
+            navigator.mediaDevices.getSupportedConstraints().facingMode &&
+            !getSupportedFacingModeLies)) {
+        delete constraints.video.facingMode;
+        var matches;
+        if (face.exact === 'environment' || face.ideal === 'environment') {
+          matches = ['back', 'rear'];
+        } else if (face.exact === 'user' || face.ideal === 'user') {
+          matches = ['front'];
+        }
+        if (matches) {
+          // Look for matches in label, or use last cam for back (typical).
+          return navigator.mediaDevices.enumerateDevices()
+          .then(function(devices) {
+            devices = devices.filter(function(d) {
+              return d.kind === 'videoinput';
+            });
+            var dev = devices.find(function(d) {
+              return matches.some(function(match) {
+                return d.label.toLowerCase().indexOf(match) !== -1;
+              });
+            });
+            if (!dev && devices.length && matches.indexOf('back') !== -1) {
+              dev = devices[devices.length - 1]; // more likely the back cam
+            }
+            if (dev) {
+              constraints.video.deviceId = face.exact ? {exact: dev.deviceId} :
+                                                        {ideal: dev.deviceId};
+            }
+            constraints.video = constraintsToChrome_(constraints.video);
+            logging('chrome: ' + JSON.stringify(constraints));
+            return func(constraints);
+          });
+        }
+      }
+      constraints.video = constraintsToChrome_(constraints.video);
+    }
+    logging('chrome: ' + JSON.stringify(constraints));
+    return func(constraints);
+  };
+
+  var shimError_ = function(e) {
+    return {
+      name: {
+        PermissionDeniedError: 'NotAllowedError',
+        InvalidStateError: 'NotReadableError',
+        DevicesNotFoundError: 'NotFoundError',
+        ConstraintNotSatisfiedError: 'OverconstrainedError',
+        TrackStartError: 'NotReadableError',
+        MediaDeviceFailedDueToShutdown: 'NotReadableError',
+        MediaDeviceKillSwitchOn: 'NotReadableError'
+      }[e.name] || e.name,
+      message: e.message,
+      constraint: e.constraintName,
+      toString: function() {
+        return this.name + (this.message && ': ') + this.message;
+      }
+    };
+  };
+
+  var getUserMedia_ = function(constraints, onSuccess, onError) {
+    shimConstraints_(constraints, function(c) {
+      navigator.webkitGetUserMedia(c, onSuccess, function(e) {
+        onError(shimError_(e));
+      });
+    });
+  };
+
+  navigator.getUserMedia = getUserMedia_;
+
+  // Returns the result of getUserMedia as a Promise.
+  var getUserMediaPromise_ = function(constraints) {
+    return new Promise(function(resolve, reject) {
+      navigator.getUserMedia(constraints, resolve, reject);
+    });
+  };
+
+  if (!navigator.mediaDevices) {
+    navigator.mediaDevices = {
+      getUserMedia: getUserMediaPromise_,
+      enumerateDevices: function() {
+        return new Promise(function(resolve) {
+          var kinds = {audio: 'audioinput', video: 'videoinput'};
+          return window.MediaStreamTrack.getSources(function(devices) {
+            resolve(devices.map(function(device) {
+              return {label: device.label,
+                kind: kinds[device.kind],
+                deviceId: device.id,
+                groupId: ''};
+            }));
+          });
+        });
+      },
+      getSupportedConstraints: function() {
+        return {
+          deviceId: true, echoCancellation: true, facingMode: true,
+          frameRate: true, height: true, width: true
+        };
+      }
+    };
+  }
+
+  // A shim for getUserMedia method on the mediaDevices object.
+  // TODO(KaptenJansson) remove once implemented in Chrome stable.
+  if (!navigator.mediaDevices.getUserMedia) {
+    navigator.mediaDevices.getUserMedia = function(constraints) {
+      return getUserMediaPromise_(constraints);
+    };
+  } else {
+    // Even though Chrome 45 has navigator.mediaDevices and a getUserMedia
+    // function which returns a Promise, it does not accept spec-style
+    // constraints.
+    var origGetUserMedia = navigator.mediaDevices.getUserMedia.
+        bind(navigator.mediaDevices);
+    navigator.mediaDevices.getUserMedia = function(cs) {
+      return shimConstraints_(cs, function(c) {
+        return origGetUserMedia(c).then(function(stream) {
+          if (c.audio && !stream.getAudioTracks().length ||
+              c.video && !stream.getVideoTracks().length) {
+            stream.getTracks().forEach(function(track) {
+              track.stop();
+            });
+            throw new DOMException('', 'NotFoundError');
+          }
+          return stream;
+        }, function(e) {
+          return Promise.reject(shimError_(e));
+        });
+      });
+    };
+  }
+
+  // Dummy devicechange event methods.
+  // TODO(KaptenJansson) remove once implemented in Chrome stable.
+  if (typeof navigator.mediaDevices.addEventListener === 'undefined') {
+    navigator.mediaDevices.addEventListener = function() {
+      logging('Dummy mediaDevices.addEventListener called.');
+    };
+  }
+  if (typeof navigator.mediaDevices.removeEventListener === 'undefined') {
+    navigator.mediaDevices.removeEventListener = function() {
+      logging('Dummy mediaDevices.removeEventListener called.');
+    };
+  }
+};
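To illustrate the constraint translation above: spec-style `ideal`/`exact` values are mapped onto Chrome's legacy `mandatory`/`optional` format, and error names are normalized by `shimError_`. The constraint values below are hypothetical:

```javascript
// Hypothetical spec-style constraints handled by the shim above.
navigator.mediaDevices.getUserMedia({
  audio: {echoCancellation: {exact: true}},  // non-numeric exact -> mandatory
  video: {
    width: {ideal: 640},           // ideal -> optional minWidth/maxWidth pair
    facingMode: {ideal: 'user'}    // may be rewritten to a deviceId constraint
  }
}).then(function(stream) {
  console.log('got tracks:', stream.getTracks().map(function(t) {
    return t.kind;
  }).join(', '));
}).catch(function(e) {
  // e.g. Chrome's PermissionDeniedError surfaces as NotAllowedError.
  console.error(e.name, e.message);
});
```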
diff --git a/src/js/edge/edge_shim.js b/src/js/edge/edge_shim.js
new file mode 100644
index 0000000..eec81af
--- /dev/null
+++ b/src/js/edge/edge_shim.js
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var utils = require('../utils');
+var shimRTCPeerConnection = require('./rtcpeerconnection_shim');
+
+module.exports = {
+  shimGetUserMedia: require('./getusermedia'),
+  shimPeerConnection: function(window) {
+    var browserDetails = utils.detectBrowser(window);
+
+    if (window.RTCIceGatherer) {
+      // ORTC defines an RTCIceCandidate object but no constructor.
+      // Not implemented in Edge.
+      if (!window.RTCIceCandidate) {
+        window.RTCIceCandidate = function(args) {
+          return args;
+        };
+      }
+      // ORTC does not have a session description object but
+      // other browsers (i.e. Chrome) that will support both PC and ORTC
+      // in the future might have this defined already.
+      if (!window.RTCSessionDescription) {
+        window.RTCSessionDescription = function(args) {
+          return args;
+        };
+      }
+      // this adds an additional event listener to MediaStreamTrack that signals
+      // when a track's enabled property was changed. Workaround for a bug in
+      // addStream, see below. No longer required in 15025+
+      if (browserDetails.version < 15025) {
+        var origMSTEnabled = Object.getOwnPropertyDescriptor(
+            window.MediaStreamTrack.prototype, 'enabled');
+        Object.defineProperty(window.MediaStreamTrack.prototype, 'enabled', {
+          set: function(value) {
+            origMSTEnabled.set.call(this, value);
+            var ev = new Event('enabled');
+            ev.enabled = value;
+            this.dispatchEvent(ev);
+          }
+        });
+      }
+    }
+
+    // ORTC defines the DTMF sender a bit differently.
+    // https://github.com/w3c/ortc/issues/714
+    if (window.RTCRtpSender && !('dtmf' in window.RTCRtpSender.prototype)) {
+      Object.defineProperty(window.RTCRtpSender.prototype, 'dtmf', {
+        get: function() {
+          if (this._dtmf === undefined) {
+            if (this.track.kind === 'audio') {
+              this._dtmf = new window.RTCDtmfSender(this);
+            } else if (this.track.kind === 'video') {
+              this._dtmf = null;
+            }
+          }
+          return this._dtmf;
+        }
+      });
+    }
+
+    window.RTCPeerConnection =
+        shimRTCPeerConnection(window, browserDetails.version);
+  },
+  shimReplaceTrack: function(window) {
+    // ORTC has replaceTrack -- https://github.com/w3c/ortc/issues/614
+    if (window.RTCRtpSender &&
+        !('replaceTrack' in window.RTCRtpSender.prototype)) {
+      window.RTCRtpSender.prototype.replaceTrack =
+          window.RTCRtpSender.prototype.setTrack;
+    }
+  }
+};
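+
+// A minimal usage sketch, assuming the caller passes the global window
+// object (names below are only illustrative):
+//   var edgeShim = require('./edge_shim');
+//   edgeShim.shimGetUserMedia(window);
+//   edgeShim.shimPeerConnection(window);
+//   edgeShim.shimReplaceTrack(window);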
diff --git a/src/js/edge/getusermedia.js b/src/js/edge/getusermedia.js
new file mode 100644
index 0000000..57c3773
--- /dev/null
+++ b/src/js/edge/getusermedia.js
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+// Expose public methods.
+module.exports = function(window) {
+  var navigator = window && window.navigator;
+
+  var shimError_ = function(e) {
+    return {
+      name: {PermissionDeniedError: 'NotAllowedError'}[e.name] || e.name,
+      message: e.message,
+      constraint: e.constraint,
+      toString: function() {
+        return this.name;
+      }
+    };
+  };
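+
+  // For example (illustrative): shimError_({name: 'PermissionDeniedError',
+  // message: 'Denied'}) yields an error-like object whose name is
+  // 'NotAllowedError' and whose toString() returns just the name.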
+
+  // getUserMedia error shim.
+  var origGetUserMedia = navigator.mediaDevices.getUserMedia.
+      bind(navigator.mediaDevices);
+  navigator.mediaDevices.getUserMedia = function(c) {
+    return origGetUserMedia(c).catch(function(e) {
+      return Promise.reject(shimError_(e));
+    });
+  };
+};
diff --git a/src/js/edge/rtcpeerconnection_shim.js b/src/js/edge/rtcpeerconnection_shim.js
new file mode 100644
index 0000000..412110d
--- /dev/null
+++ b/src/js/edge/rtcpeerconnection_shim.js
@@ -0,0 +1,1389 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var SDPUtils = require('sdp');
+
+// Sort tracks such that they follow an audio-video-audio-video
+// (a-v-a-v...) pattern.
+function sortTracks(tracks) {
+  var audioTracks = tracks.filter(function(track) {
+    return track.kind === 'audio';
+  });
+  var videoTracks = tracks.filter(function(track) {
+    return track.kind === 'video';
+  });
+  tracks = [];
+  while (audioTracks.length || videoTracks.length) {
+    if (audioTracks.length) {
+      tracks.push(audioTracks.shift());
+    }
+    if (videoTracks.length) {
+      tracks.push(videoTracks.shift());
+    }
+  }
+  return tracks;
+}
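+
+// For example (illustrative): given [a1, a2, v1], where a* are audio tracks
+// and v* are video tracks, sortTracks returns [a1, v1, a2].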
+
+// Edge does not like:
+// 1) stun: (filtered out below version 14393)
+// 2) turn: that does not have all of turn:host:port?transport=udp
+// 3) turn: with IPv6 addresses
+// 4) turn: occurring multiple times
+function filterIceServers(iceServers, edgeVersion) {
+  var hasTurn = false;
+  iceServers = JSON.parse(JSON.stringify(iceServers));
+  return iceServers.filter(function(server) {
+    if (server && (server.urls || server.url)) {
+      var urls = server.urls || server.url;
+      if (server.url && !server.urls) {
+        console.warn('RTCIceServer.url is deprecated! Use urls instead.');
+      }
+      var isString = typeof urls === 'string';
+      if (isString) {
+        urls = [urls];
+      }
+      urls = urls.filter(function(url) {
+        var validTurn = url.indexOf('turn:') === 0 &&
+            url.indexOf('transport=udp') !== -1 &&
+            url.indexOf('turn:[') === -1 &&
+            !hasTurn;
+
+        if (validTurn) {
+          hasTurn = true;
+          return true;
+        }
+        return url.indexOf('stun:') === 0 && edgeVersion >= 14393;
+      });
+
+      delete server.url;
+      server.urls = isString ? urls[0] : urls;
+      return !!urls.length;
+    }
+    return false;
+  });
+}
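+
+// For example (illustrative, hypothetical server names), on Edge 14393+:
+//   filterIceServers([
+//     {urls: 'stun:stun.example.org'},
+//     {urls: ['turn:turn.example.org:3478?transport=udp',
+//             'turn:turn.example.org:3478?transport=tcp']}
+//   ], 14393)
+// keeps the stun server and only the ?transport=udp turn URL; any further
+// turn server in the list would be dropped because only the first valid
+// turn entry is kept.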
+
+// Determines the intersection of local and remote capabilities.
+function getCommonCapabilities(localCapabilities, remoteCapabilities) {
+  var commonCapabilities = {
+    codecs: [],
+    headerExtensions: [],
+    fecMechanisms: []
+  };
+
+  var findCodecByPayloadType = function(pt, codecs) {
+    pt = parseInt(pt, 10);
+    for (var i = 0; i < codecs.length; i++) {
+      if (codecs[i].payloadType === pt ||
+          codecs[i].preferredPayloadType === pt) {
+        return codecs[i];
+      }
+    }
+  };
+
+  var rtxCapabilityMatches = function(lRtx, rRtx, lCodecs, rCodecs) {
+    var lCodec = findCodecByPayloadType(lRtx.parameters.apt, lCodecs);
+    var rCodec = findCodecByPayloadType(rRtx.parameters.apt, rCodecs);
+    return lCodec && rCodec &&
+        lCodec.name.toLowerCase() === rCodec.name.toLowerCase();
+  };
+
+  localCapabilities.codecs.forEach(function(lCodec) {
+    for (var i = 0; i < remoteCapabilities.codecs.length; i++) {
+      var rCodec = remoteCapabilities.codecs[i];
+      if (lCodec.name.toLowerCase() === rCodec.name.toLowerCase() &&
+          lCodec.clockRate === rCodec.clockRate) {
+        if (lCodec.name.toLowerCase() === 'rtx' &&
+            lCodec.parameters && rCodec.parameters.apt) {
+          // for RTX we need to find the local rtx that has an apt
+          // which points to the same local codec as the remote one.
+          if (!rtxCapabilityMatches(lCodec, rCodec,
+              localCapabilities.codecs, remoteCapabilities.codecs)) {
+            continue;
+          }
+        }
+        rCodec = JSON.parse(JSON.stringify(rCodec)); // deepcopy
+        // number of channels is the highest common number of channels
+        rCodec.numChannels = Math.min(lCodec.numChannels,
+            rCodec.numChannels);
+        // push rCodec so we reply with offerer payload type
+        commonCapabilities.codecs.push(rCodec);
+
+        // determine common feedback mechanisms
+        rCodec.rtcpFeedback = rCodec.rtcpFeedback.filter(function(fb) {
+          for (var j = 0; j < lCodec.rtcpFeedback.length; j++) {
+            if (lCodec.rtcpFeedback[j].type === fb.type &&
+                lCodec.rtcpFeedback[j].parameter === fb.parameter) {
+              return true;
+            }
+          }
+          return false;
+        });
+        // FIXME: also need to determine .parameters
+        //  see https://github.com/openpeer/ortc/issues/569
+        break;
+      }
+    }
+  });
+
+  localCapabilities.headerExtensions.forEach(function(lHeaderExtension) {
+    for (var i = 0; i < remoteCapabilities.headerExtensions.length;
+         i++) {
+      var rHeaderExtension = remoteCapabilities.headerExtensions[i];
+      if (lHeaderExtension.uri === rHeaderExtension.uri) {
+        commonCapabilities.headerExtensions.push(rHeaderExtension);
+        break;
+      }
+    }
+  });
+
+  // FIXME: fecMechanisms
+  return commonCapabilities;
+}
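+
+// For example (illustrative): if the local side supports opus/48000 with
+// payload type 111 and the remote offer lists opus/48000 with payload type
+// 109, the common codec list contains the remote entry (payload type 109),
+// so the answer replies with the offerer's payload types.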
+
+// Checks whether a setLocalDescription/setRemoteDescription call with the
+// given description type is allowed in the given signalingState.
+function isActionAllowedInSignalingState(action, type, signalingState) {
+  return {
+    offer: {
+      setLocalDescription: ['stable', 'have-local-offer'],
+      setRemoteDescription: ['stable', 'have-remote-offer']
+    },
+    answer: {
+      setLocalDescription: ['have-remote-offer', 'have-local-pranswer'],
+      setRemoteDescription: ['have-local-offer', 'have-remote-pranswer']
+    }
+  }[type][action].indexOf(signalingState) !== -1;
+}
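+
+// For example (illustrative):
+//   isActionAllowedInSignalingState('setLocalDescription', 'offer', 'stable')
+//   returns true, while the same call with type 'answer' returns false
+//   (an answer may only be set in have-remote-offer or have-local-pranswer).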
+
+module.exports = function(window, edgeVersion) {
+  var RTCPeerConnection = function(config) {
+    var self = this;
+
+    var _eventTarget = document.createDocumentFragment();
+    ['addEventListener', 'removeEventListener', 'dispatchEvent']
+        .forEach(function(method) {
+          self[method] = _eventTarget[method].bind(_eventTarget);
+        });
+
+    this.needNegotiation = false;
+
+    this.onicecandidate = null;
+    this.onaddstream = null;
+    this.ontrack = null;
+    this.onremovestream = null;
+    this.onsignalingstatechange = null;
+    this.oniceconnectionstatechange = null;
+    this.onicegatheringstatechange = null;
+    this.onnegotiationneeded = null;
+    this.ondatachannel = null;
+    this.canTrickleIceCandidates = null;
+
+    this.localStreams = [];
+    this.remoteStreams = [];
+    this.getLocalStreams = function() {
+      return self.localStreams;
+    };
+    this.getRemoteStreams = function() {
+      return self.remoteStreams;
+    };
+
+    this.localDescription = new window.RTCSessionDescription({
+      type: '',
+      sdp: ''
+    });
+    this.remoteDescription = new window.RTCSessionDescription({
+      type: '',
+      sdp: ''
+    });
+    this.signalingState = 'stable';
+    this.iceConnectionState = 'new';
+    this.iceGatheringState = 'new';
+
+    this.iceOptions = {
+      gatherPolicy: 'all',
+      iceServers: []
+    };
+    if (config && config.iceTransportPolicy) {
+      switch (config.iceTransportPolicy) {
+        case 'all':
+        case 'relay':
+          this.iceOptions.gatherPolicy = config.iceTransportPolicy;
+          break;
+        default:
+          // don't set iceTransportPolicy.
+          break;
+      }
+    }
+    this.usingBundle = config && config.bundlePolicy === 'max-bundle';
+
+    if (config && config.iceServers) {
+      this.iceOptions.iceServers = filterIceServers(config.iceServers,
+          edgeVersion);
+    }
+    this._config = config || {};
+
+    // per-track iceGatherers, iceTransports, dtlsTransports, rtpSenders, ...
+    // everything that is needed to describe an SDP m-line.
+    this.transceivers = [];
+
+    // Since the iceGatherer is currently created in createOffer but we
+    // must not emit candidates until after setLocalDescription, we buffer
+    // them in this array.
+    this._localIceCandidatesBuffer = [];
+
+    this._sdpSessionId = SDPUtils.generateSessionId();
+  };
+
+  RTCPeerConnection.prototype._emitGatheringStateChange = function() {
+    var event = new Event('icegatheringstatechange');
+    this.dispatchEvent(event);
+    if (this.onicegatheringstatechange !== null) {
+      this.onicegatheringstatechange(event);
+    }
+  };
+
+  RTCPeerConnection.prototype._emitBufferedCandidates = function() {
+    var self = this;
+    var sections = SDPUtils.splitSections(self.localDescription.sdp);
+    // FIXME: need to apply ice candidates in a way which is async but
+    // in-order
+    this._localIceCandidatesBuffer.forEach(function(event) {
+      var end = !event.candidate || Object.keys(event.candidate).length === 0;
+      if (end) {
+        for (var j = 1; j < sections.length; j++) {
+          if (sections[j].indexOf('\r\na=end-of-candidates\r\n') === -1) {
+            sections[j] += 'a=end-of-candidates\r\n';
+          }
+        }
+      } else {
+        sections[event.candidate.sdpMLineIndex + 1] +=
+            'a=' + event.candidate.candidate + '\r\n';
+      }
+      self.localDescription.sdp = sections.join('');
+      self.dispatchEvent(event);
+      if (self.onicecandidate !== null) {
+        self.onicecandidate(event);
+      }
+      if (!event.candidate && self.iceGatheringState !== 'complete') {
+        var complete = self.transceivers.every(function(transceiver) {
+          return transceiver.iceGatherer &&
+              transceiver.iceGatherer.state === 'completed';
+        });
+        if (complete && self.iceGatheringState !== 'complete') {
+          self.iceGatheringState = 'complete';
+          self._emitGatheringStateChange();
+        }
+      }
+    });
+    this._localIceCandidatesBuffer = [];
+  };
+
+  RTCPeerConnection.prototype.getConfiguration = function() {
+    return this._config;
+  };
+
+  // internal helper to create a transceiver object
+  // (which is not yet the same as the WebRTC 1.0 transceiver).
+  RTCPeerConnection.prototype._createTransceiver = function(kind) {
+    var hasBundleTransport = this.transceivers.length > 0;
+    var transceiver = {
+      track: null,
+      iceGatherer: null,
+      iceTransport: null,
+      dtlsTransport: null,
+      localCapabilities: null,
+      remoteCapabilities: null,
+      rtpSender: null,
+      rtpReceiver: null,
+      kind: kind,
+      mid: null,
+      sendEncodingParameters: null,
+      recvEncodingParameters: null,
+      stream: null,
+      wantReceive: true
+    };
+    if (this.usingBundle && hasBundleTransport) {
+      transceiver.iceTransport = this.transceivers[0].iceTransport;
+      transceiver.dtlsTransport = this.transceivers[0].dtlsTransport;
+    } else {
+      var transports = this._createIceAndDtlsTransports();
+      transceiver.iceTransport = transports.iceTransport;
+      transceiver.dtlsTransport = transports.dtlsTransport;
+    }
+    this.transceivers.push(transceiver);
+    return transceiver;
+  };
+
+  RTCPeerConnection.prototype.addTrack = function(track, stream) {
+    var transceiver;
+    for (var i = 0; i < this.transceivers.length; i++) {
+      if (!this.transceivers[i].track &&
+          this.transceivers[i].kind === track.kind) {
+        transceiver = this.transceivers[i];
+      }
+    }
+    if (!transceiver) {
+      transceiver = this._createTransceiver(track.kind);
+    }
+
+    transceiver.track = track;
+    transceiver.stream = stream;
+    transceiver.rtpSender = new window.RTCRtpSender(track,
+        transceiver.dtlsTransport);
+
+    this._maybeFireNegotiationNeeded();
+    return transceiver.rtpSender;
+  };
+
+  RTCPeerConnection.prototype.addStream = function(stream) {
+    var self = this;
+    if (edgeVersion >= 15025) {
+      this.localStreams.push(stream);
+      stream.getTracks().forEach(function(track) {
+        self.addTrack(track, stream);
+      });
+    } else {
+      // Cloning is necessary mostly for local demos; attaching a track
+      // directly to two different senders does not work (build 10547).
+      // Fixed in 15025 (or earlier).
+      var clonedStream = stream.clone();
+      stream.getTracks().forEach(function(track, idx) {
+        var clonedTrack = clonedStream.getTracks()[idx];
+        track.addEventListener('enabled', function(event) {
+          clonedTrack.enabled = event.enabled;
+        });
+      });
+      clonedStream.getTracks().forEach(function(track) {
+        self.addTrack(track, clonedStream);
+      });
+      this.localStreams.push(clonedStream);
+    }
+    this._maybeFireNegotiationNeeded();
+  };
+
+  RTCPeerConnection.prototype.removeStream = function(stream) {
+    var idx = this.localStreams.indexOf(stream);
+    if (idx > -1) {
+      this.localStreams.splice(idx, 1);
+      this._maybeFireNegotiationNeeded();
+    }
+  };
+
+  RTCPeerConnection.prototype.getSenders = function() {
+    return this.transceivers.filter(function(transceiver) {
+      return !!transceiver.rtpSender;
+    })
+    .map(function(transceiver) {
+      return transceiver.rtpSender;
+    });
+  };
+
+  RTCPeerConnection.prototype.getReceivers = function() {
+    return this.transceivers.filter(function(transceiver) {
+      return !!transceiver.rtpReceiver;
+    })
+    .map(function(transceiver) {
+      return transceiver.rtpReceiver;
+    });
+  };
+
+  // Create ICE gatherer and hook it up.
+  RTCPeerConnection.prototype._createIceGatherer = function(mid,
+      sdpMLineIndex) {
+    var self = this;
+    var iceGatherer = new window.RTCIceGatherer(self.iceOptions);
+    iceGatherer.onlocalcandidate = function(evt) {
+      var event = new Event('icecandidate');
+      event.candidate = {sdpMid: mid, sdpMLineIndex: sdpMLineIndex};
+
+      var cand = evt.candidate;
+      var end = !cand || Object.keys(cand).length === 0;
+      // Edge emits an empty object for RTCIceCandidateComplete.
+      if (end) {
+        // polyfill since RTCIceGatherer.state is not implemented in
+        // Edge 10547 yet.
+        if (iceGatherer.state === undefined) {
+          iceGatherer.state = 'completed';
+        }
+      } else {
+        // RTCIceCandidate doesn't have a component; it needs to be added.
+        cand.component = 1;
+        event.candidate.candidate = SDPUtils.writeCandidate(cand);
+      }
+
+      // update local description.
+      var sections = SDPUtils.splitSections(self.localDescription.sdp);
+      if (!end) {
+        sections[event.candidate.sdpMLineIndex + 1] +=
+            'a=' + event.candidate.candidate + '\r\n';
+      } else {
+        sections[event.candidate.sdpMLineIndex + 1] +=
+            'a=end-of-candidates\r\n';
+      }
+      self.localDescription.sdp = sections.join('');
+      var transceivers = self._pendingOffer ? self._pendingOffer :
+          self.transceivers;
+      var complete = transceivers.every(function(transceiver) {
+        return transceiver.iceGatherer &&
+            transceiver.iceGatherer.state === 'completed';
+      });
+
+      // Emit candidate if localDescription is set.
+      // Also emits null candidate when all gatherers are complete.
+      switch (self.iceGatheringState) {
+        case 'new':
+          if (!end) {
+            self._localIceCandidatesBuffer.push(event);
+          }
+          if (end && complete) {
+            self._localIceCandidatesBuffer.push(
+                new Event('icecandidate'));
+          }
+          break;
+        case 'gathering':
+          self._emitBufferedCandidates();
+          if (!end) {
+            self.dispatchEvent(event);
+            if (self.onicecandidate !== null) {
+              self.onicecandidate(event);
+            }
+          }
+          if (complete) {
+            self.dispatchEvent(new Event('icecandidate'));
+            if (self.onicecandidate !== null) {
+              self.onicecandidate(new Event('icecandidate'));
+            }
+            self.iceGatheringState = 'complete';
+            self._emitGatheringStateChange();
+          }
+          break;
+        case 'complete':
+          // should not happen... currently!
+          break;
+        default: // no-op.
+          break;
+      }
+    };
+    return iceGatherer;
+  };
+
+  // Create ICE transport and DTLS transport.
+  RTCPeerConnection.prototype._createIceAndDtlsTransports = function() {
+    var self = this;
+    var iceTransport = new window.RTCIceTransport(null);
+    iceTransport.onicestatechange = function() {
+      self._updateConnectionState();
+    };
+
+    var dtlsTransport = new window.RTCDtlsTransport(iceTransport);
+    dtlsTransport.ondtlsstatechange = function() {
+      self._updateConnectionState();
+    };
+    dtlsTransport.onerror = function() {
+      // onerror does not set state to failed by itself.
+      Object.defineProperty(dtlsTransport, 'state',
+          {value: 'failed', writable: true});
+      self._updateConnectionState();
+    };
+
+    return {
+      iceTransport: iceTransport,
+      dtlsTransport: dtlsTransport
+    };
+  };
+
+  // Destroy the ICE gatherer, ICE transport and DTLS transport
+  // without triggering their callbacks.
+  RTCPeerConnection.prototype._disposeIceAndDtlsTransports = function(
+      sdpMLineIndex) {
+    var iceGatherer = this.transceivers[sdpMLineIndex].iceGatherer;
+    if (iceGatherer) {
+      delete iceGatherer.onlocalcandidate;
+      delete this.transceivers[sdpMLineIndex].iceGatherer;
+    }
+    var iceTransport = this.transceivers[sdpMLineIndex].iceTransport;
+    if (iceTransport) {
+      delete iceTransport.onicestatechange;
+      delete this.transceivers[sdpMLineIndex].iceTransport;
+    }
+    var dtlsTransport = this.transceivers[sdpMLineIndex].dtlsTransport;
+    if (dtlsTransport) {
+      delete dtlsTransport.ondtlsstatechange;
+      delete dtlsTransport.onerror;
+      delete this.transceivers[sdpMLineIndex].dtlsTransport;
+    }
+  };
+
+  // Start the RTP Sender and Receiver for a transceiver.
+  RTCPeerConnection.prototype._transceive = function(transceiver,
+      send, recv) {
+    var params = getCommonCapabilities(transceiver.localCapabilities,
+        transceiver.remoteCapabilities);
+    if (send && transceiver.rtpSender) {
+      params.encodings = transceiver.sendEncodingParameters;
+      params.rtcp = {
+        cname: SDPUtils.localCName,
+        compound: transceiver.rtcpParameters.compound
+      };
+      if (transceiver.recvEncodingParameters.length) {
+        params.rtcp.ssrc = transceiver.recvEncodingParameters[0].ssrc;
+      }
+      transceiver.rtpSender.send(params);
+    }
+    if (recv && transceiver.rtpReceiver) {
+      // remove RTX field in Edge versions before 15019 (e.g. 14942)
+      if (transceiver.kind === 'video'
+          && transceiver.recvEncodingParameters
+          && edgeVersion < 15019) {
+        transceiver.recvEncodingParameters.forEach(function(p) {
+          delete p.rtx;
+        });
+      }
+      params.encodings = transceiver.recvEncodingParameters;
+      params.rtcp = {
+        cname: transceiver.rtcpParameters.cname,
+        compound: transceiver.rtcpParameters.compound
+      };
+      if (transceiver.sendEncodingParameters.length) {
+        params.rtcp.ssrc = transceiver.sendEncodingParameters[0].ssrc;
+      }
+      transceiver.rtpReceiver.receive(params);
+    }
+  };
+
+  RTCPeerConnection.prototype.setLocalDescription = function(description) {
+    var self = this;
+
+    if (!isActionAllowedInSignalingState('setLocalDescription',
+        description.type, this.signalingState)) {
+      var e = new Error('Can not set local ' + description.type +
+          ' in state ' + this.signalingState);
+      e.name = 'InvalidStateError';
+      if (arguments.length > 2 && typeof arguments[2] === 'function') {
+        window.setTimeout(arguments[2], 0, e);
+      }
+      return Promise.reject(e);
+    }
+
+    var sections;
+    var sessionpart;
+    if (description.type === 'offer') {
+      // FIXME: What was the purpose of this empty if statement?
+      // if (!this._pendingOffer) {
+      // } else {
+      if (this._pendingOffer) {
+        // VERY limited support for SDP munging. Limited to:
+        // * changing the order of codecs
+        sections = SDPUtils.splitSections(description.sdp);
+        sessionpart = sections.shift();
+        sections.forEach(function(mediaSection, sdpMLineIndex) {
+          var caps = SDPUtils.parseRtpParameters(mediaSection);
+          self._pendingOffer[sdpMLineIndex].localCapabilities = caps;
+        });
+        this.transceivers = this._pendingOffer;
+        delete this._pendingOffer;
+      }
+    } else if (description.type === 'answer') {
+      sections = SDPUtils.splitSections(self.remoteDescription.sdp);
+      sessionpart = sections.shift();
+      var isIceLite = SDPUtils.matchPrefix(sessionpart,
+          'a=ice-lite').length > 0;
+      sections.forEach(function(mediaSection, sdpMLineIndex) {
+        var transceiver = self.transceivers[sdpMLineIndex];
+        var iceGatherer = transceiver.iceGatherer;
+        var iceTransport = transceiver.iceTransport;
+        var dtlsTransport = transceiver.dtlsTransport;
+        var localCapabilities = transceiver.localCapabilities;
+        var remoteCapabilities = transceiver.remoteCapabilities;
+
+        var rejected = SDPUtils.isRejected(mediaSection);
+
+        if (!rejected && !transceiver.isDatachannel) {
+          var remoteIceParameters = SDPUtils.getIceParameters(
+              mediaSection, sessionpart);
+          var remoteDtlsParameters = SDPUtils.getDtlsParameters(
+              mediaSection, sessionpart);
+          if (isIceLite) {
+            remoteDtlsParameters.role = 'server';
+          }
+
+          if (!self.usingBundle || sdpMLineIndex === 0) {
+            iceTransport.start(iceGatherer, remoteIceParameters,
+                isIceLite ? 'controlling' : 'controlled');
+            dtlsTransport.start(remoteDtlsParameters);
+          }
+
+          // Calculate intersection of capabilities.
+          var params = getCommonCapabilities(localCapabilities,
+              remoteCapabilities);
+
+          // Start the RTCRtpSender. The RTCRtpReceiver for this
+          // transceiver has already been started in setRemoteDescription.
+          self._transceive(transceiver,
+              params.codecs.length > 0,
+              false);
+        }
+      });
+    }
+
+    this.localDescription = {
+      type: description.type,
+      sdp: description.sdp
+    };
+    switch (description.type) {
+      case 'offer':
+        this._updateSignalingState('have-local-offer');
+        break;
+      case 'answer':
+        this._updateSignalingState('stable');
+        break;
+      default:
+        throw new TypeError('unsupported type "' + description.type +
+            '"');
+    }
+
+    // If a success callback was provided, emit ICE candidates after it
+    // has been executed. Otherwise, emit the candidates after the Promise
+    // is resolved.
+    var hasCallback = arguments.length > 1 &&
+      typeof arguments[1] === 'function';
+    if (hasCallback) {
+      var cb = arguments[1];
+      window.setTimeout(function() {
+        cb();
+        if (self.iceGatheringState === 'new') {
+          self.iceGatheringState = 'gathering';
+          self._emitGatheringStateChange();
+        }
+        self._emitBufferedCandidates();
+      }, 0);
+    }
+    var p = Promise.resolve();
+    p.then(function() {
+      if (!hasCallback) {
+        if (self.iceGatheringState === 'new') {
+          self.iceGatheringState = 'gathering';
+          self._emitGatheringStateChange();
+        }
+        // Usually candidates will be emitted earlier.
+        window.setTimeout(self._emitBufferedCandidates.bind(self), 500);
+      }
+    });
+    return p;
+  };
+
+  RTCPeerConnection.prototype.setRemoteDescription = function(description) {
+    var self = this;
+
+    if (!isActionAllowedInSignalingState('setRemoteDescription',
+        description.type, this.signalingState)) {
+      var e = new Error('Can not set remote ' + description.type +
+          ' in state ' + this.signalingState);
+      e.name = 'InvalidStateError';
+      if (arguments.length > 2 && typeof arguments[2] === 'function') {
+        window.setTimeout(arguments[2], 0, e);
+      }
+      return Promise.reject(e);
+    }
+
+    var streams = {};
+    var receiverList = [];
+    var sections = SDPUtils.splitSections(description.sdp);
+    var sessionpart = sections.shift();
+    var isIceLite = SDPUtils.matchPrefix(sessionpart,
+        'a=ice-lite').length > 0;
+    var usingBundle = SDPUtils.matchPrefix(sessionpart,
+        'a=group:BUNDLE ').length > 0;
+    this.usingBundle = usingBundle;
+    var iceOptions = SDPUtils.matchPrefix(sessionpart,
+        'a=ice-options:')[0];
+    if (iceOptions) {
+      this.canTrickleIceCandidates = iceOptions.substr(14).split(' ')
+          .indexOf('trickle') >= 0;
+    } else {
+      this.canTrickleIceCandidates = false;
+    }
+
+    sections.forEach(function(mediaSection, sdpMLineIndex) {
+      var lines = SDPUtils.splitLines(mediaSection);
+      var kind = SDPUtils.getKind(mediaSection);
+      var rejected = SDPUtils.isRejected(mediaSection);
+      var protocol = lines[0].substr(2).split(' ')[2];
+
+      var direction = SDPUtils.getDirection(mediaSection, sessionpart);
+      var remoteMsid = SDPUtils.parseMsid(mediaSection);
+
+      var mid = SDPUtils.getMid(mediaSection) || SDPUtils.generateIdentifier();
+
+      // Reject datachannels which are not implemented yet.
+      if (kind === 'application' && protocol === 'DTLS/SCTP') {
+        self.transceivers[sdpMLineIndex] = {
+          mid: mid,
+          isDatachannel: true
+        };
+        return;
+      }
+
+      var transceiver;
+      var iceGatherer;
+      var iceTransport;
+      var dtlsTransport;
+      var rtpReceiver;
+      var sendEncodingParameters;
+      var recvEncodingParameters;
+      var localCapabilities;
+
+      var track;
+      // FIXME: ensure the mediaSection has rtcp-mux set.
+      var remoteCapabilities = SDPUtils.parseRtpParameters(mediaSection);
+      var remoteIceParameters;
+      var remoteDtlsParameters;
+      if (!rejected) {
+        remoteIceParameters = SDPUtils.getIceParameters(mediaSection,
+            sessionpart);
+        remoteDtlsParameters = SDPUtils.getDtlsParameters(mediaSection,
+            sessionpart);
+        remoteDtlsParameters.role = 'client';
+      }
+      recvEncodingParameters =
+          SDPUtils.parseRtpEncodingParameters(mediaSection);
+
+      var rtcpParameters = SDPUtils.parseRtcpParameters(mediaSection);
+
+      var isComplete = SDPUtils.matchPrefix(mediaSection,
+          'a=end-of-candidates', sessionpart).length > 0;
+      var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
+          .map(function(cand) {
+            return SDPUtils.parseCandidate(cand);
+          })
+          .filter(function(cand) {
+            return cand.component === '1' || cand.component === 1;
+          });
+
+      // Check if we can use BUNDLE and dispose transports.
+      if ((description.type === 'offer' || description.type === 'answer') &&
+          !rejected && usingBundle && sdpMLineIndex > 0 &&
+          self.transceivers[sdpMLineIndex]) {
+        self._disposeIceAndDtlsTransports(sdpMLineIndex);
+        self.transceivers[sdpMLineIndex].iceGatherer =
+            self.transceivers[0].iceGatherer;
+        self.transceivers[sdpMLineIndex].iceTransport =
+            self.transceivers[0].iceTransport;
+        self.transceivers[sdpMLineIndex].dtlsTransport =
+            self.transceivers[0].dtlsTransport;
+        if (self.transceivers[sdpMLineIndex].rtpSender) {
+          self.transceivers[sdpMLineIndex].rtpSender.setTransport(
+              self.transceivers[0].dtlsTransport);
+        }
+        if (self.transceivers[sdpMLineIndex].rtpReceiver) {
+          self.transceivers[sdpMLineIndex].rtpReceiver.setTransport(
+              self.transceivers[0].dtlsTransport);
+        }
+      }
+      if (description.type === 'offer' && !rejected) {
+        transceiver = self.transceivers[sdpMLineIndex] ||
+            self._createTransceiver(kind);
+        transceiver.mid = mid;
+
+        if (!transceiver.iceGatherer) {
+          transceiver.iceGatherer = usingBundle && sdpMLineIndex > 0 ?
+              self.transceivers[0].iceGatherer :
+              self._createIceGatherer(mid, sdpMLineIndex);
+        }
+
+        if (isComplete && cands.length &&
+            (!usingBundle || sdpMLineIndex === 0)) {
+          transceiver.iceTransport.setRemoteCandidates(cands);
+        }
+
+        localCapabilities = window.RTCRtpReceiver.getCapabilities(kind);
+
+        // filter RTX until additional stuff needed for RTX is implemented
+        // in adapter.js
+        if (edgeVersion < 15019) {
+          localCapabilities.codecs = localCapabilities.codecs.filter(
+              function(codec) {
+                return codec.name !== 'rtx';
+              });
+        }
+
+        sendEncodingParameters = [{
+          ssrc: (2 * sdpMLineIndex + 2) * 1001
+        }];
+
+        if (direction === 'sendrecv' || direction === 'sendonly') {
+          rtpReceiver = new window.RTCRtpReceiver(transceiver.dtlsTransport,
+              kind);
+
+          track = rtpReceiver.track;
+          // FIXME: does not work with Plan B.
+          if (remoteMsid) {
+            if (!streams[remoteMsid.stream]) {
+              streams[remoteMsid.stream] = new window.MediaStream();
+              Object.defineProperty(streams[remoteMsid.stream], 'id', {
+                get: function() {
+                  return remoteMsid.stream;
+                }
+              });
+            }
+            Object.defineProperty(track, 'id', {
+              get: function() {
+                return remoteMsid.track;
+              }
+            });
+            streams[remoteMsid.stream].addTrack(track);
+            receiverList.push([track, rtpReceiver,
+              streams[remoteMsid.stream]]);
+          } else {
+            if (!streams.default) {
+              streams.default = new window.MediaStream();
+            }
+            streams.default.addTrack(track);
+            receiverList.push([track, rtpReceiver, streams.default]);
+          }
+        }
+
+        transceiver.localCapabilities = localCapabilities;
+        transceiver.remoteCapabilities = remoteCapabilities;
+        transceiver.rtpReceiver = rtpReceiver;
+        transceiver.rtcpParameters = rtcpParameters;
+        transceiver.sendEncodingParameters = sendEncodingParameters;
+        transceiver.recvEncodingParameters = recvEncodingParameters;
+
+        // Start the RTCRtpReceiver now. The RTCRtpSender is started in
+        // setLocalDescription.
+        self._transceive(self.transceivers[sdpMLineIndex],
+            false,
+            direction === 'sendrecv' || direction === 'sendonly');
+      } else if (description.type === 'answer' && !rejected) {
+        transceiver = self.transceivers[sdpMLineIndex];
+        iceGatherer = transceiver.iceGatherer;
+        iceTransport = transceiver.iceTransport;
+        dtlsTransport = transceiver.dtlsTransport;
+        rtpReceiver = transceiver.rtpReceiver;
+        sendEncodingParameters = transceiver.sendEncodingParameters;
+        localCapabilities = transceiver.localCapabilities;
+
+        self.transceivers[sdpMLineIndex].recvEncodingParameters =
+            recvEncodingParameters;
+        self.transceivers[sdpMLineIndex].remoteCapabilities =
+            remoteCapabilities;
+        self.transceivers[sdpMLineIndex].rtcpParameters = rtcpParameters;
+
+        if (!usingBundle || sdpMLineIndex === 0) {
+          if ((isIceLite || isComplete) && cands.length) {
+            iceTransport.setRemoteCandidates(cands);
+          }
+          iceTransport.start(iceGatherer, remoteIceParameters,
+              'controlling');
+          dtlsTransport.start(remoteDtlsParameters);
+        }
+
+        self._transceive(transceiver,
+            direction === 'sendrecv' || direction === 'recvonly',
+            direction === 'sendrecv' || direction === 'sendonly');
+
+        if (rtpReceiver &&
+            (direction === 'sendrecv' || direction === 'sendonly')) {
+          track = rtpReceiver.track;
+          if (remoteMsid) {
+            if (!streams[remoteMsid.stream]) {
+              streams[remoteMsid.stream] = new window.MediaStream();
+            }
+            streams[remoteMsid.stream].addTrack(track);
+            receiverList.push([track, rtpReceiver, streams[remoteMsid.stream]]);
+          } else {
+            if (!streams.default) {
+              streams.default = new window.MediaStream();
+            }
+            streams.default.addTrack(track);
+            receiverList.push([track, rtpReceiver, streams.default]);
+          }
+        } else {
+          // FIXME: actually the receiver should be created later.
+          delete transceiver.rtpReceiver;
+        }
+      }
+    });
+
+    this.remoteDescription = {
+      type: description.type,
+      sdp: description.sdp
+    };
+    switch (description.type) {
+      case 'offer':
+        this._updateSignalingState('have-remote-offer');
+        break;
+      case 'answer':
+        this._updateSignalingState('stable');
+        break;
+      default:
+        throw new TypeError('unsupported type "' + description.type +
+            '"');
+    }
+    Object.keys(streams).forEach(function(sid) {
+      var stream = streams[sid];
+      if (stream.getTracks().length) {
+        self.remoteStreams.push(stream);
+        var event = new Event('addstream');
+        event.stream = stream;
+        self.dispatchEvent(event);
+        if (self.onaddstream !== null) {
+          window.setTimeout(function() {
+            self.onaddstream(event);
+          }, 0);
+        }
+
+        receiverList.forEach(function(item) {
+          var track = item[0];
+          var receiver = item[1];
+          if (stream.id !== item[2].id) {
+            return;
+          }
+          var trackEvent = new Event('track');
+          trackEvent.track = track;
+          trackEvent.receiver = receiver;
+          trackEvent.streams = [stream];
+          self.dispatchEvent(trackEvent);
+          if (self.ontrack !== null) {
+            window.setTimeout(function() {
+              self.ontrack(trackEvent);
+            }, 0);
+          }
+        });
+      }
+    });
+
+    // Check whether an end-of-candidates notification (addIceCandidate({}))
+    // was received within four seconds after setRemoteDescription; if not,
+    // add one ourselves.
+    window.setTimeout(function() {
+      if (!(self && self.transceivers)) {
+        return;
+      }
+      self.transceivers.forEach(function(transceiver) {
+        if (transceiver.iceTransport &&
+            transceiver.iceTransport.state === 'new' &&
+            transceiver.iceTransport.getRemoteCandidates().length > 0) {
+          console.warn('Timeout for addRemoteCandidate. Consider sending ' +
+              'an end-of-candidates notification');
+          transceiver.iceTransport.addRemoteCandidate({});
+        }
+      });
+    }, 4000);
+
+    if (arguments.length > 1 && typeof arguments[1] === 'function') {
+      window.setTimeout(arguments[1], 0);
+    }
+    return Promise.resolve();
+  };
+
+  RTCPeerConnection.prototype.close = function() {
+    this.transceivers.forEach(function(transceiver) {
+      /* not yet
+      if (transceiver.iceGatherer) {
+        transceiver.iceGatherer.close();
+      }
+      */
+      if (transceiver.iceTransport) {
+        transceiver.iceTransport.stop();
+      }
+      if (transceiver.dtlsTransport) {
+        transceiver.dtlsTransport.stop();
+      }
+      if (transceiver.rtpSender) {
+        transceiver.rtpSender.stop();
+      }
+      if (transceiver.rtpReceiver) {
+        transceiver.rtpReceiver.stop();
+      }
+    });
+    // FIXME: clean up tracks, local streams, remote streams, etc
+    this._updateSignalingState('closed');
+  };
+
+  // Update the signaling state.
+  RTCPeerConnection.prototype._updateSignalingState = function(newState) {
+    this.signalingState = newState;
+    var event = new Event('signalingstatechange');
+    this.dispatchEvent(event);
+    if (this.onsignalingstatechange !== null) {
+      this.onsignalingstatechange(event);
+    }
+  };
+
+  // Determine whether to fire the negotiationneeded event.
+  RTCPeerConnection.prototype._maybeFireNegotiationNeeded = function() {
+    var self = this;
+    if (this.signalingState !== 'stable' || this.needNegotiation === true) {
+      return;
+    }
+    this.needNegotiation = true;
+    window.setTimeout(function() {
+      if (self.needNegotiation === false) {
+        return;
+      }
+      self.needNegotiation = false;
+      var event = new Event('negotiationneeded');
+      self.dispatchEvent(event);
+      if (self.onnegotiationneeded !== null) {
+        self.onnegotiationneeded(event);
+      }
+    }, 0);
+  };
+
+  // Update the connection state.
+  RTCPeerConnection.prototype._updateConnectionState = function() {
+    var self = this;
+    var newState;
+    var states = {
+      'new': 0,
+      closed: 0,
+      connecting: 0,
+      checking: 0,
+      connected: 0,
+      completed: 0,
+      disconnected: 0,
+      failed: 0
+    };
+    this.transceivers.forEach(function(transceiver) {
+      states[transceiver.iceTransport.state]++;
+      states[transceiver.dtlsTransport.state]++;
+    });
+    // ICETransport.completed and connected are the same for this purpose.
+    states.connected += states.completed;
+
+    newState = 'new';
+    if (states.failed > 0) {
+      newState = 'failed';
+    } else if (states.connecting > 0 || states.checking > 0) {
+      newState = 'connecting';
+    } else if (states.disconnected > 0) {
+      newState = 'disconnected';
+    } else if (states.new > 0) {
+      newState = 'new';
+    } else if (states.connected > 0 || states.completed > 0) {
+      newState = 'connected';
+    }
+
+    if (newState !== self.iceConnectionState) {
+      self.iceConnectionState = newState;
+      var event = new Event('iceconnectionstatechange');
+      this.dispatchEvent(event);
+      if (this.oniceconnectionstatechange !== null) {
+        this.oniceconnectionstatechange(event);
+      }
+    }
+  };
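+
+  // For example (illustrative): if any transceiver's iceTransport or
+  // dtlsTransport is in the 'failed' state, the aggregated
+  // iceConnectionState computed above becomes 'failed', regardless of how
+  // many other transports are already 'connected'.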
+
+  RTCPeerConnection.prototype.createOffer = function() {
+    var self = this;
+    if (this._pendingOffer) {
+      throw new Error('createOffer called while there is a pending offer.');
+    }
+    var offerOptions;
+    if (arguments.length === 1 && typeof arguments[0] !== 'function') {
+      offerOptions = arguments[0];
+    } else if (arguments.length === 3) {
+      offerOptions = arguments[2];
+    }
+
+    var numAudioTracks = this.transceivers.filter(function(t) {
+      return t.kind === 'audio';
+    }).length;
+    var numVideoTracks = this.transceivers.filter(function(t) {
+      return t.kind === 'video';
+    }).length;
+
+    // Determine number of audio and video tracks we need to send/recv.
+    if (offerOptions) {
+      // Reject Chrome legacy constraints.
+      if (offerOptions.mandatory || offerOptions.optional) {
+        throw new TypeError(
+            'Legacy mandatory/optional constraints not supported.');
+      }
+      if (offerOptions.offerToReceiveAudio !== undefined) {
+        if (offerOptions.offerToReceiveAudio === true) {
+          numAudioTracks = 1;
+        } else if (offerOptions.offerToReceiveAudio === false) {
+          numAudioTracks = 0;
+        } else {
+          numAudioTracks = offerOptions.offerToReceiveAudio;
+        }
+      }
+      if (offerOptions.offerToReceiveVideo !== undefined) {
+        if (offerOptions.offerToReceiveVideo === true) {
+          numVideoTracks = 1;
+        } else if (offerOptions.offerToReceiveVideo === false) {
+          numVideoTracks = 0;
+        } else {
+          numVideoTracks = offerOptions.offerToReceiveVideo;
+        }
+      }
+    }
+
+    this.transceivers.forEach(function(transceiver) {
+      if (transceiver.kind === 'audio') {
+        numAudioTracks--;
+        if (numAudioTracks < 0) {
+          transceiver.wantReceive = false;
+        }
+      } else if (transceiver.kind === 'video') {
+        numVideoTracks--;
+        if (numVideoTracks < 0) {
+          transceiver.wantReceive = false;
+        }
+      }
+    });
+
+    // Create M-lines for recvonly streams.
+    while (numAudioTracks > 0 || numVideoTracks > 0) {
+      if (numAudioTracks > 0) {
+        this._createTransceiver('audio');
+        numAudioTracks--;
+      }
+      if (numVideoTracks > 0) {
+        this._createTransceiver('video');
+        numVideoTracks--;
+      }
+    }
+    // reorder tracks
+    var transceivers = sortTracks(this.transceivers);
+
+    var sdp = SDPUtils.writeSessionBoilerplate(this._sdpSessionId);
+    transceivers.forEach(function(transceiver, sdpMLineIndex) {
+      // For each track, create an ice gatherer, ice transport,
+      // dtls transport, potentially rtpsender and rtpreceiver.
+      var track = transceiver.track;
+      var kind = transceiver.kind;
+      var mid = SDPUtils.generateIdentifier();
+      transceiver.mid = mid;
+
+      if (!transceiver.iceGatherer) {
+        transceiver.iceGatherer = self.usingBundle && sdpMLineIndex > 0 ?
+            transceivers[0].iceGatherer :
+            self._createIceGatherer(mid, sdpMLineIndex);
+      }
+
+      var localCapabilities = window.RTCRtpSender.getCapabilities(kind);
+      // filter RTX until additional stuff needed for RTX is implemented
+      // in adapter.js
+      if (edgeVersion < 15019) {
+        localCapabilities.codecs = localCapabilities.codecs.filter(
+            function(codec) {
+              return codec.name !== 'rtx';
+            });
+      }
+      localCapabilities.codecs.forEach(function(codec) {
+        // work around https://bugs.chromium.org/p/webrtc/issues/detail?id=6552
+        // by adding level-asymmetry-allowed=1
+        if (codec.name === 'H264' &&
+            codec.parameters['level-asymmetry-allowed'] === undefined) {
+          codec.parameters['level-asymmetry-allowed'] = '1';
+        }
+      });
+
+      // generate an ssrc now, to be used later in rtpSender.send
+      var sendEncodingParameters = [{
+        ssrc: (2 * sdpMLineIndex + 1) * 1001
+      }];
+      if (track) {
+        // add RTX
+        if (edgeVersion >= 15019 && kind === 'video') {
+          sendEncodingParameters[0].rtx = {
+            ssrc: (2 * sdpMLineIndex + 1) * 1001 + 1
+          };
+        }
+      }
+
+      if (transceiver.wantReceive) {
+        transceiver.rtpReceiver = new window.RTCRtpReceiver(
+          transceiver.dtlsTransport,
+          kind
+        );
+      }
+
+      transceiver.localCapabilities = localCapabilities;
+      transceiver.sendEncodingParameters = sendEncodingParameters;
+    });
+
+    // always offer BUNDLE and dispose on return if not supported.
+    if (this._config.bundlePolicy !== 'max-compat') {
+      sdp += 'a=group:BUNDLE ' + transceivers.map(function(t) {
+        return t.mid;
+      }).join(' ') + '\r\n';
+    }
+    sdp += 'a=ice-options:trickle\r\n';
+
+    transceivers.forEach(function(transceiver, sdpMLineIndex) {
+      sdp += SDPUtils.writeMediaSection(transceiver,
+          transceiver.localCapabilities, 'offer', transceiver.stream);
+      sdp += 'a=rtcp-rsize\r\n';
+    });
+
+    this._pendingOffer = transceivers;
+    var desc = new window.RTCSessionDescription({
+      type: 'offer',
+      sdp: sdp
+    });
+    if (arguments.length && typeof arguments[0] === 'function') {
+      window.setTimeout(arguments[0], 0, desc);
+    }
+    return Promise.resolve(desc);
+  };
+
+  RTCPeerConnection.prototype.createAnswer = function() {
+    var sdp = SDPUtils.writeSessionBoilerplate(this._sdpSessionId);
+    if (this.usingBundle) {
+      sdp += 'a=group:BUNDLE ' + this.transceivers.map(function(t) {
+        return t.mid;
+      }).join(' ') + '\r\n';
+    }
+    this.transceivers.forEach(function(transceiver, sdpMLineIndex) {
+      if (transceiver.isDatachannel) {
+        sdp += 'm=application 0 DTLS/SCTP 5000\r\n' +
+            'c=IN IP4 0.0.0.0\r\n' +
+            'a=mid:' + transceiver.mid + '\r\n';
+        return;
+      }
+
+      // FIXME: look at direction.
+      if (transceiver.stream) {
+        var localTrack;
+        if (transceiver.kind === 'audio') {
+          localTrack = transceiver.stream.getAudioTracks()[0];
+        } else if (transceiver.kind === 'video') {
+          localTrack = transceiver.stream.getVideoTracks()[0];
+        }
+        if (localTrack) {
+          // add RTX
+          if (edgeVersion >= 15019 && transceiver.kind === 'video') {
+            transceiver.sendEncodingParameters[0].rtx = {
+              ssrc: (2 * sdpMLineIndex + 2) * 1001 + 1
+            };
+          }
+        }
+      }
+
+      // Calculate intersection of capabilities.
+      var commonCapabilities = getCommonCapabilities(
+          transceiver.localCapabilities,
+          transceiver.remoteCapabilities);
+
+      var hasRtx = commonCapabilities.codecs.filter(function(c) {
+        return c.name.toLowerCase() === 'rtx';
+      }).length;
+      if (!hasRtx && transceiver.sendEncodingParameters[0].rtx) {
+        delete transceiver.sendEncodingParameters[0].rtx;
+      }
+
+      sdp += SDPUtils.writeMediaSection(transceiver, commonCapabilities,
+          'answer', transceiver.stream);
+      if (transceiver.rtcpParameters &&
+          transceiver.rtcpParameters.reducedSize) {
+        sdp += 'a=rtcp-rsize\r\n';
+      }
+    });
+
+    var desc = new window.RTCSessionDescription({
+      type: 'answer',
+      sdp: sdp
+    });
+    if (arguments.length && typeof arguments[0] === 'function') {
+      window.setTimeout(arguments[0], 0, desc);
+    }
+    return Promise.resolve(desc);
+  };
+
+  RTCPeerConnection.prototype.addIceCandidate = function(candidate) {
+    if (!candidate) {
+      for (var j = 0; j < this.transceivers.length; j++) {
+        this.transceivers[j].iceTransport.addRemoteCandidate({});
+        if (this.usingBundle) {
+          return Promise.resolve();
+        }
+      }
+    } else {
+      var mLineIndex = candidate.sdpMLineIndex;
+      if (candidate.sdpMid) {
+        for (var i = 0; i < this.transceivers.length; i++) {
+          if (this.transceivers[i].mid === candidate.sdpMid) {
+            mLineIndex = i;
+            break;
+          }
+        }
+      }
+      var transceiver = this.transceivers[mLineIndex];
+      if (transceiver) {
+        var cand = Object.keys(candidate.candidate).length > 0 ?
+            SDPUtils.parseCandidate(candidate.candidate) : {};
+        // Ignore Chrome's invalid candidates since Edge does not like them.
+        if (cand.protocol === 'tcp' && (cand.port === 0 || cand.port === 9)) {
+          return Promise.resolve();
+        }
+        // Ignore RTCP candidates; we assume RTCP-MUX.
+        if (cand.component &&
+            !(cand.component === '1' || cand.component === 1)) {
+          return Promise.resolve();
+        }
+        transceiver.iceTransport.addRemoteCandidate(cand);
+
+        // update the remoteDescription.
+        var sections = SDPUtils.splitSections(this.remoteDescription.sdp);
+        sections[mLineIndex + 1] += (cand.type ? candidate.candidate.trim()
+            : 'a=end-of-candidates') + '\r\n';
+        this.remoteDescription.sdp = sections.join('');
+      }
+    }
+    if (arguments.length > 1 && typeof arguments[1] === 'function') {
+      window.setTimeout(arguments[1], 0);
+    }
+    return Promise.resolve();
+  };
+
+  RTCPeerConnection.prototype.getStats = function() {
+    var promises = [];
+    this.transceivers.forEach(function(transceiver) {
+      ['rtpSender', 'rtpReceiver', 'iceGatherer', 'iceTransport',
+        'dtlsTransport'].forEach(function(method) {
+          if (transceiver[method]) {
+            promises.push(transceiver[method].getStats());
+          }
+        });
+    });
+    var cb = arguments.length > 1 && typeof arguments[1] === 'function' &&
+        arguments[1];
+    var fixStatsType = function(stat) {
+      return {
+        inboundrtp: 'inbound-rtp',
+        outboundrtp: 'outbound-rtp',
+        candidatepair: 'candidate-pair',
+        localcandidate: 'local-candidate',
+        remotecandidate: 'remote-candidate'
+      }[stat.type] || stat.type;
+    };
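+    // e.g. (illustrative) fixStatsType({type: 'inboundrtp'}) returns
+    // 'inbound-rtp'; unknown types are passed through unchanged.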
+    return new Promise(function(resolve) {
+      // shim getStats with maplike support
+      var results = new Map();
+      Promise.all(promises).then(function(res) {
+        res.forEach(function(result) {
+          Object.keys(result).forEach(function(id) {
+            result[id].type = fixStatsType(result[id]);
+            results.set(id, result[id]);
+          });
+        });
+        if (cb) {
+          window.setTimeout(cb, 0, results);
+        }
+        resolve(results);
+      });
+    });
+  };
+  return RTCPeerConnection;
+};
diff --git a/src/js/firefox/firefox_shim.js b/src/js/firefox/firefox_shim.js
new file mode 100644
index 0000000..93f6760
--- /dev/null
+++ b/src/js/firefox/firefox_shim.js
@@ -0,0 +1,198 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var utils = require('../utils');
+
+var firefoxShim = {
+  shimOnTrack: function(window) {
+    if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
+        window.RTCPeerConnection.prototype)) {
+      Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
+        get: function() {
+          return this._ontrack;
+        },
+        set: function(f) {
+          if (this._ontrack) {
+            this.removeEventListener('track', this._ontrack);
+            this.removeEventListener('addstream', this._ontrackpoly);
+          }
+          this.addEventListener('track', this._ontrack = f);
+          this.addEventListener('addstream', this._ontrackpoly = function(e) {
+            e.stream.getTracks().forEach(function(track) {
+              var event = new Event('track');
+              event.track = track;
+              event.receiver = {track: track};
+              event.streams = [e.stream];
+              this.dispatchEvent(event);
+            }.bind(this));
+          }.bind(this));
+        }
+      });
+    }
+  },
+
+  shimSourceObject: function(window) {
+    // Firefox has supported mozSrcObject since FF22, unprefixed in 42.
+    if (typeof window === 'object') {
+      if (window.HTMLMediaElement &&
+        !('srcObject' in window.HTMLMediaElement.prototype)) {
+        // Shim the srcObject property, once, when HTMLMediaElement is found.
+        Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
+          get: function() {
+            return this.mozSrcObject;
+          },
+          set: function(stream) {
+            this.mozSrcObject = stream;
+          }
+        });
+      }
+    }
+  },
+
+  shimPeerConnection: function(window) {
+    var browserDetails = utils.detectBrowser(window);
+
+    if (typeof window !== 'object' || !(window.RTCPeerConnection ||
+        window.mozRTCPeerConnection)) {
+      return; // probably media.peerconnection.enabled=false in about:config
+    }
+    // The RTCPeerConnection object.
+    if (!window.RTCPeerConnection) {
+      window.RTCPeerConnection = function(pcConfig, pcConstraints) {
+        if (browserDetails.version < 38) {
+          // .urls is not supported in FF < 38.
+          // create RTCIceServers with a single url.
+          if (pcConfig && pcConfig.iceServers) {
+            var newIceServers = [];
+            for (var i = 0; i < pcConfig.iceServers.length; i++) {
+              var server = pcConfig.iceServers[i];
+              if (server.hasOwnProperty('urls')) {
+                for (var j = 0; j < server.urls.length; j++) {
+                  var newServer = {
+                    url: server.urls[j]
+                  };
+                  if (server.urls[j].indexOf('turn') === 0) {
+                    newServer.username = server.username;
+                    newServer.credential = server.credential;
+                  }
+                  newIceServers.push(newServer);
+                }
+              } else {
+                newIceServers.push(pcConfig.iceServers[i]);
+              }
+            }
+            pcConfig.iceServers = newIceServers;
+          }
+        }
+        return new window.mozRTCPeerConnection(pcConfig, pcConstraints);
+      };
+      window.RTCPeerConnection.prototype =
+          window.mozRTCPeerConnection.prototype;
+
+      // wrap static methods. Currently just generateCertificate.
+      if (window.mozRTCPeerConnection.generateCertificate) {
+        Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
+          get: function() {
+            return window.mozRTCPeerConnection.generateCertificate;
+          }
+        });
+      }
+
+      window.RTCSessionDescription = window.mozRTCSessionDescription;
+      window.RTCIceCandidate = window.mozRTCIceCandidate;
+    }
+
+    // shim away need for obsolete RTCIceCandidate/RTCSessionDescription.
+    ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
+        .forEach(function(method) {
+          var nativeMethod = window.RTCPeerConnection.prototype[method];
+          window.RTCPeerConnection.prototype[method] = function() {
+            arguments[0] = new ((method === 'addIceCandidate') ?
+                window.RTCIceCandidate :
+                window.RTCSessionDescription)(arguments[0]);
+            return nativeMethod.apply(this, arguments);
+          };
+        });
+
+    // support for addIceCandidate(null or undefined)
+    var nativeAddIceCandidate =
+        window.RTCPeerConnection.prototype.addIceCandidate;
+    window.RTCPeerConnection.prototype.addIceCandidate = function() {
+      if (!arguments[0]) {
+        if (arguments[1]) {
+          arguments[1].apply(null);
+        }
+        return Promise.resolve();
+      }
+      return nativeAddIceCandidate.apply(this, arguments);
+    };
+
+    // shim getStats with maplike support
+    var makeMapStats = function(stats) {
+      var map = new Map();
+      Object.keys(stats).forEach(function(key) {
+        map.set(key, stats[key]);
+        map[key] = stats[key];
+      });
+      return map;
+    };
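+    // For example (illustrative):
+    //   makeMapStats({someId: {type: 'inboundrtp'}}) returns a Map where
+    //   map.get('someId') and the legacy property access map.someId both
+    //   yield the same stats object.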
+
+    var modernStatsTypes = {
+      inboundrtp: 'inbound-rtp',
+      outboundrtp: 'outbound-rtp',
+      candidatepair: 'candidate-pair',
+      localcandidate: 'local-candidate',
+      remotecandidate: 'remote-candidate'
+    };
+
+    var nativeGetStats = window.RTCPeerConnection.prototype.getStats;
+    window.RTCPeerConnection.prototype.getStats = function(
+      selector,
+      onSucc,
+      onErr
+    ) {
+      return nativeGetStats.apply(this, [selector || null])
+        .then(function(stats) {
+          if (browserDetails.version < 48) {
+            stats = makeMapStats(stats);
+          }
+          if (browserDetails.version < 53 && !onSucc) {
+            // Shim only promise getStats with spec-hyphens in type names
+            // Leave callback version alone; misc old uses of forEach before Map
+            try {
+              stats.forEach(function(stat) {
+                stat.type = modernStatsTypes[stat.type] || stat.type;
+              });
+            } catch (e) {
+              if (e.name !== 'TypeError') {
+                throw e;
+              }
+              // Avoid TypeError: "type" is read-only, in old versions. 34-43ish
+              stats.forEach(function(stat, i) {
+                stats.set(i, Object.assign({}, stat, {
+                  type: modernStatsTypes[stat.type] || stat.type
+                }));
+              });
+            }
+          }
+          return stats;
+        })
+        .then(onSucc, onErr);
+    };
+  }
+};
+
+// Expose public methods.
+module.exports = {
+  shimOnTrack: firefoxShim.shimOnTrack,
+  shimSourceObject: firefoxShim.shimSourceObject,
+  shimPeerConnection: firefoxShim.shimPeerConnection,
+  shimGetUserMedia: require('./getusermedia')
+};
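For illustration only (this sketch is not part of the imported source): the exports above are normally wired up by src/js/adapter_factory.js, but a consumer could apply the Firefox shims directly along these lines, assuming a Browserify/Node-style build and a hypothetical require path.

```js
// Illustrative sketch; the require path is an assumption.
var firefoxShim = require('./src/js/firefox/firefox_shim');

if (window.navigator && window.navigator.mozGetUserMedia) {
  firefoxShim.shimGetUserMedia(window);   // navigator(.mediaDevices).getUserMedia
  firefoxShim.shimSourceObject(window);   // element.srcObject
  firefoxShim.shimPeerConnection(window); // RTCPeerConnection, getStats, addIceCandidate(null)
  firefoxShim.shimOnTrack(window);        // ontrack / the track event
}
```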
diff --git a/src/js/firefox/getusermedia.js b/src/js/firefox/getusermedia.js
new file mode 100644
index 0000000..70fc77d
--- /dev/null
+++ b/src/js/firefox/getusermedia.js
@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var utils = require('../utils');
+var logging = utils.log;
+
+// Expose public methods.
+module.exports = function(window) {
+  var browserDetails = utils.detectBrowser(window);
+  var navigator = window && window.navigator;
+  var MediaStreamTrack = window && window.MediaStreamTrack;
+
+  var shimError_ = function(e) {
+    return {
+      name: {
+        InternalError: 'NotReadableError',
+        NotSupportedError: 'TypeError',
+        PermissionDeniedError: 'NotAllowedError',
+        SecurityError: 'NotAllowedError'
+      }[e.name] || e.name,
+      message: {
+        'The operation is insecure.': 'The request is not allowed by the ' +
+        'user agent or the platform in the current context.'
+      }[e.message] || e.message,
+      constraint: e.constraint,
+      toString: function() {
+        return this.name + (this.message && ': ') + this.message;
+      }
+    };
+  };
+
+  // getUserMedia constraints shim.
+  var getUserMedia_ = function(constraints, onSuccess, onError) {
+    var constraintsToFF37_ = function(c) {
+      if (typeof c !== 'object' || c.require) {
+        return c;
+      }
+      var require = [];
+      Object.keys(c).forEach(function(key) {
+        if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
+          return;
+        }
+        var r = c[key] = (typeof c[key] === 'object') ?
+            c[key] : {ideal: c[key]};
+        if (r.min !== undefined ||
+            r.max !== undefined || r.exact !== undefined) {
+          require.push(key);
+        }
+        if (r.exact !== undefined) {
+          if (typeof r.exact === 'number') {
+            r.min = r.max = r.exact;
+          } else {
+            c[key] = r.exact;
+          }
+          delete r.exact;
+        }
+        if (r.ideal !== undefined) {
+          c.advanced = c.advanced || [];
+          var oc = {};
+          if (typeof r.ideal === 'number') {
+            oc[key] = {min: r.ideal, max: r.ideal};
+          } else {
+            oc[key] = r.ideal;
+          }
+          c.advanced.push(oc);
+          delete r.ideal;
+          if (!Object.keys(r).length) {
+            delete c[key];
+          }
+        }
+      });
+      if (require.length) {
+        c.require = require;
+      }
+      return c;
+    };
+    constraints = JSON.parse(JSON.stringify(constraints));
+    if (browserDetails.version < 38) {
+      logging('spec: ' + JSON.stringify(constraints));
+      if (constraints.audio) {
+        constraints.audio = constraintsToFF37_(constraints.audio);
+      }
+      if (constraints.video) {
+        constraints.video = constraintsToFF37_(constraints.video);
+      }
+      logging('ff37: ' + JSON.stringify(constraints));
+    }
+    return navigator.mozGetUserMedia(constraints, onSuccess, function(e) {
+      onError(shimError_(e));
+    });
+  };
+
+  // Returns the result of getUserMedia as a Promise.
+  var getUserMediaPromise_ = function(constraints) {
+    return new Promise(function(resolve, reject) {
+      getUserMedia_(constraints, resolve, reject);
+    });
+  };
+
+  // Shim for mediaDevices on older versions.
+  if (!navigator.mediaDevices) {
+    navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
+      addEventListener: function() { },
+      removeEventListener: function() { }
+    };
+  }
+  navigator.mediaDevices.enumerateDevices =
+      navigator.mediaDevices.enumerateDevices || function() {
+        return new Promise(function(resolve) {
+          var infos = [
+            {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
+            {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
+          ];
+          resolve(infos);
+        });
+      };
+
+  if (browserDetails.version < 41) {
+    // Work around http://bugzil.la/1169665
+    var orgEnumerateDevices =
+        navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
+    navigator.mediaDevices.enumerateDevices = function() {
+      return orgEnumerateDevices().then(undefined, function(e) {
+        if (e.name === 'NotFoundError') {
+          return [];
+        }
+        throw e;
+      });
+    };
+  }
+  if (browserDetails.version < 49) {
+    var origGetUserMedia = navigator.mediaDevices.getUserMedia.
+        bind(navigator.mediaDevices);
+    navigator.mediaDevices.getUserMedia = function(c) {
+      return origGetUserMedia(c).then(function(stream) {
+        // Work around https://bugzil.la/802326
+        if (c.audio && !stream.getAudioTracks().length ||
+            c.video && !stream.getVideoTracks().length) {
+          stream.getTracks().forEach(function(track) {
+            track.stop();
+          });
+          throw new DOMException('The object can not be found here.',
+                                 'NotFoundError');
+        }
+        return stream;
+      }, function(e) {
+        return Promise.reject(shimError_(e));
+      });
+    };
+  }
+  if (!(browserDetails.version > 55 &&
+      'autoGainControl' in navigator.mediaDevices.getSupportedConstraints())) {
+    var remap = function(obj, a, b) {
+      if (a in obj && !(b in obj)) {
+        obj[b] = obj[a];
+        delete obj[a];
+      }
+    };
+
+    var nativeGetUserMedia = navigator.mediaDevices.getUserMedia.
+        bind(navigator.mediaDevices);
+    navigator.mediaDevices.getUserMedia = function(c) {
+      if (typeof c === 'object' && typeof c.audio === 'object') {
+        c = JSON.parse(JSON.stringify(c));
+        remap(c.audio, 'autoGainControl', 'mozAutoGainControl');
+        remap(c.audio, 'noiseSuppression', 'mozNoiseSuppression');
+      }
+      return nativeGetUserMedia(c);
+    };
+
+    if (MediaStreamTrack && MediaStreamTrack.prototype.getSettings) {
+      var nativeGetSettings = MediaStreamTrack.prototype.getSettings;
+      MediaStreamTrack.prototype.getSettings = function() {
+        var obj = nativeGetSettings.apply(this, arguments);
+        remap(obj, 'mozAutoGainControl', 'autoGainControl');
+        remap(obj, 'mozNoiseSuppression', 'noiseSuppression');
+        return obj;
+      };
+    }
+
+    if (MediaStreamTrack && MediaStreamTrack.prototype.applyConstraints) {
+      var nativeApplyConstraints = MediaStreamTrack.prototype.applyConstraints;
+      MediaStreamTrack.prototype.applyConstraints = function(c) {
+        if (this.kind === 'audio' && typeof c === 'object') {
+          c = JSON.parse(JSON.stringify(c));
+          remap(c, 'autoGainControl', 'mozAutoGainControl');
+          remap(c, 'noiseSuppression', 'mozNoiseSuppression');
+        }
+        return nativeApplyConstraints.apply(this, [c]);
+      };
+    }
+  }
+  navigator.getUserMedia = function(constraints, onSuccess, onError) {
+    if (browserDetails.version < 44) {
+      return getUserMedia_(constraints, onSuccess, onError);
+    }
+    // Replace Firefox 44+'s deprecation warning with unprefixed version.
+    console.warn('navigator.getUserMedia has been replaced by ' +
+                 'navigator.mediaDevices.getUserMedia');
+    navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
+  };
+};
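To make the autoGainControl/noiseSuppression handling above concrete, here is a small standalone illustration of the remap() helper (copied from the shim above); the constraint values are arbitrary examples, not defaults.

```js
// Same helper as defined in the shim above.
function remap(obj, a, b) {
  if (a in obj && !(b in obj)) {
    obj[b] = obj[a];
    delete obj[a];
  }
}

var audio = {autoGainControl: false, noiseSuppression: true};
remap(audio, 'autoGainControl', 'mozAutoGainControl');
remap(audio, 'noiseSuppression', 'mozNoiseSuppression');
// audio is now {mozAutoGainControl: false, mozNoiseSuppression: true},
// the prefixed form that older Firefox releases understand. getSettings()
// is wrapped in the opposite direction so callers always see the
// unprefixed names.
```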
diff --git a/src/js/safari/safari_shim.js b/src/js/safari/safari_shim.js
new file mode 100644
index 0000000..7300f74
--- /dev/null
+++ b/src/js/safari/safari_shim.js
@@ -0,0 +1,251 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+'use strict';
+var utils = require('../utils');
+
+var safariShim = {
+  // TODO: DrAlex, should be here, double check against LayoutTests
+
+  // TODO: once the back-end for the mac port is done, add.
+  // TODO: check for webkitGTK+
+  // shimPeerConnection: function() { },
+
+  shimLocalStreamsAPI: function(window) {
+    if (typeof window !== 'object' || !window.RTCPeerConnection) {
+      return;
+    }
+    if (!('getLocalStreams' in window.RTCPeerConnection.prototype)) {
+      window.RTCPeerConnection.prototype.getLocalStreams = function() {
+        if (!this._localStreams) {
+          this._localStreams = [];
+        }
+        return this._localStreams;
+      };
+    }
+    if (!('getStreamById' in window.RTCPeerConnection.prototype)) {
+      window.RTCPeerConnection.prototype.getStreamById = function(id) {
+        var result = null;
+        if (this._localStreams) {
+          this._localStreams.forEach(function(stream) {
+            if (stream.id === id) {
+              result = stream;
+            }
+          });
+        }
+        if (this._remoteStreams) {
+          this._remoteStreams.forEach(function(stream) {
+            if (stream.id === id) {
+              result = stream;
+            }
+          });
+        }
+        return result;
+      };
+    }
+    if (!('addStream' in window.RTCPeerConnection.prototype)) {
+      var _addTrack = window.RTCPeerConnection.prototype.addTrack;
+      window.RTCPeerConnection.prototype.addStream = function(stream) {
+        if (!this._localStreams) {
+          this._localStreams = [];
+        }
+        if (this._localStreams.indexOf(stream) === -1) {
+          this._localStreams.push(stream);
+        }
+        var self = this;
+        stream.getTracks().forEach(function(track) {
+          _addTrack.call(self, track, stream);
+        });
+      };
+
+      window.RTCPeerConnection.prototype.addTrack = function(track, stream) {
+        if (stream) {
+          if (!this._localStreams) {
+            this._localStreams = [stream];
+          } else if (this._localStreams.indexOf(stream) === -1) {
+            this._localStreams.push(stream);
+          }
+        }
+        _addTrack.call(this, track, stream);
+      };
+    }
+    if (!('removeStream' in window.RTCPeerConnection.prototype)) {
+      window.RTCPeerConnection.prototype.removeStream = function(stream) {
+        if (!this._localStreams) {
+          this._localStreams = [];
+        }
+        var index = this._localStreams.indexOf(stream);
+        if (index === -1) {
+          return;
+        }
+        this._localStreams.splice(index, 1);
+        var self = this;
+        var tracks = stream.getTracks();
+        this.getSenders().forEach(function(sender) {
+          if (tracks.indexOf(sender.track) !== -1) {
+            self.removeTrack(sender);
+          }
+        });
+      };
+    }
+  },
+  shimRemoteStreamsAPI: function(window) {
+    if (typeof window !== 'object' || !window.RTCPeerConnection) {
+      return;
+    }
+    if (!('getRemoteStreams' in window.RTCPeerConnection.prototype)) {
+      window.RTCPeerConnection.prototype.getRemoteStreams = function() {
+        return this._remoteStreams ? this._remoteStreams : [];
+      };
+    }
+    if (!('onaddstream' in window.RTCPeerConnection.prototype)) {
+      Object.defineProperty(window.RTCPeerConnection.prototype, 'onaddstream', {
+        get: function() {
+          return this._onaddstream;
+        },
+        set: function(f) {
+          if (this._onaddstream) {
+            this.removeEventListener('addstream', this._onaddstream);
+            this.removeEventListener('track', this._onaddstreampoly);
+          }
+          this.addEventListener('addstream', this._onaddstream = f);
+          this.addEventListener('track', this._onaddstreampoly = function(e) {
+            var stream = e.streams[0];
+            if (!this._remoteStreams) {
+              this._remoteStreams = [];
+            }
+            if (this._remoteStreams.indexOf(stream) >= 0) {
+              return;
+            }
+            this._remoteStreams.push(stream);
+            var event = new Event('addstream');
+            event.stream = e.streams[0];
+            this.dispatchEvent(event);
+          }.bind(this));
+        }
+      });
+    }
+  },
+  shimCallbacksAPI: function(window) {
+    if (typeof window !== 'object' || !window.RTCPeerConnection) {
+      return;
+    }
+    var prototype = window.RTCPeerConnection.prototype;
+    var createOffer = prototype.createOffer;
+    var createAnswer = prototype.createAnswer;
+    var setLocalDescription = prototype.setLocalDescription;
+    var setRemoteDescription = prototype.setRemoteDescription;
+    var addIceCandidate = prototype.addIceCandidate;
+
+    prototype.createOffer = function(successCallback, failureCallback) {
+      var options = (arguments.length >= 2) ? arguments[2] : arguments[0];
+      var promise = createOffer.apply(this, [options]);
+      if (!failureCallback) {
+        return promise;
+      }
+      promise.then(successCallback, failureCallback);
+      return Promise.resolve();
+    };
+
+    prototype.createAnswer = function(successCallback, failureCallback) {
+      var options = (arguments.length >= 2) ? arguments[2] : arguments[0];
+      var promise = createAnswer.apply(this, [options]);
+      if (!failureCallback) {
+        return promise;
+      }
+      promise.then(successCallback, failureCallback);
+      return Promise.resolve();
+    };
+
+    var withCallback = function(description, successCallback, failureCallback) {
+      var promise = setLocalDescription.apply(this, [description]);
+      if (!failureCallback) {
+        return promise;
+      }
+      promise.then(successCallback, failureCallback);
+      return Promise.resolve();
+    };
+    prototype.setLocalDescription = withCallback;
+
+    withCallback = function(description, successCallback, failureCallback) {
+      var promise = setRemoteDescription.apply(this, [description]);
+      if (!failureCallback) {
+        return promise;
+      }
+      promise.then(successCallback, failureCallback);
+      return Promise.resolve();
+    };
+    prototype.setRemoteDescription = withCallback;
+
+    withCallback = function(candidate, successCallback, failureCallback) {
+      var promise = addIceCandidate.apply(this, [candidate]);
+      if (!failureCallback) {
+        return promise;
+      }
+      promise.then(successCallback, failureCallback);
+      return Promise.resolve();
+    };
+    prototype.addIceCandidate = withCallback;
+  },
+  shimGetUserMedia: function(window) {
+    var navigator = window && window.navigator;
+
+    if (!navigator.getUserMedia) {
+      if (navigator.webkitGetUserMedia) {
+        navigator.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
+      } else if (navigator.mediaDevices &&
+          navigator.mediaDevices.getUserMedia) {
+        navigator.getUserMedia = function(constraints, cb, errcb) {
+          navigator.mediaDevices.getUserMedia(constraints)
+          .then(cb, errcb);
+        }.bind(navigator);
+      }
+    }
+  },
+  shimRTCIceServerUrls: function(window) {
+    // migrate from non-spec RTCIceServer.url to RTCIceServer.urls
+    var OrigPeerConnection = window.RTCPeerConnection;
+    window.RTCPeerConnection = function(pcConfig, pcConstraints) {
+      if (pcConfig && pcConfig.iceServers) {
+        var newIceServers = [];
+        for (var i = 0; i < pcConfig.iceServers.length; i++) {
+          var server = pcConfig.iceServers[i];
+          if (!server.hasOwnProperty('urls') &&
+              server.hasOwnProperty('url')) {
+            utils.deprecated('RTCIceServer.url', 'RTCIceServer.urls');
+            server = JSON.parse(JSON.stringify(server));
+            server.urls = server.url;
+            delete server.url;
+            newIceServers.push(server);
+          } else {
+            newIceServers.push(pcConfig.iceServers[i]);
+          }
+        }
+        pcConfig.iceServers = newIceServers;
+      }
+      return new OrigPeerConnection(pcConfig, pcConstraints);
+    };
+    window.RTCPeerConnection.prototype = OrigPeerConnection.prototype;
+    // wrap static methods. Currently just generateCertificate.
+    Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
+      get: function() {
+        return OrigPeerConnection.generateCertificate;
+      }
+    });
+  }
+};
+
+// Expose public methods.
+module.exports = {
+  shimCallbacksAPI: safariShim.shimCallbacksAPI,
+  shimLocalStreamsAPI: safariShim.shimLocalStreamsAPI,
+  shimRemoteStreamsAPI: safariShim.shimRemoteStreamsAPI,
+  shimGetUserMedia: safariShim.shimGetUserMedia,
+  shimRTCIceServerUrls: safariShim.shimRTCIceServerUrls
+  // TODO
+  // shimPeerConnection: safariShim.shimPeerConnection
+};
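A hedged sketch of what shimCallbacksAPI and shimRTCIceServerUrls enable once applied: legacy callback signatures and the non-spec RTCIceServer.url field keep working on top of Safari's promise-based implementation. The STUN URL below is only an example.

```js
var pc = new RTCPeerConnection({
  iceServers: [{url: 'stun:stun.example.org'}] // legacy .url, migrated to .urls
});

// Legacy callback style, restored by shimCallbacksAPI:
pc.createOffer(function(offer) {
  pc.setLocalDescription(offer, function() {
    // local description applied; pc.createOffer().then(...) works as well
  }, console.error);
}, console.error);
```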
diff --git a/src/js/utils.js b/src/js/utils.js
new file mode 100644
index 0000000..9362d8d
--- /dev/null
+++ b/src/js/utils.js
@@ -0,0 +1,197 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+var logDisabled_ = true;
+var deprecationWarnings_ = true;
+
+// Utility methods.
+var utils = {
+  disableLog: function(bool) {
+    if (typeof bool !== 'boolean') {
+      return new Error('Argument type: ' + typeof bool +
+          '. Please use a boolean.');
+    }
+    logDisabled_ = bool;
+    return (bool) ? 'adapter.js logging disabled' :
+        'adapter.js logging enabled';
+  },
+
+  /**
+   * Disable or enable deprecation warnings
+   * @param {!boolean} bool set to true to disable warnings.
+   */
+  disableWarnings: function(bool) {
+    if (typeof bool !== 'boolean') {
+      return new Error('Argument type: ' + typeof bool +
+          '. Please use a boolean.');
+    }
+    deprecationWarnings_ = !bool;
+    return 'adapter.js deprecation warnings ' + (bool ? 'disabled' : 'enabled');
+  },
+
+  log: function() {
+    if (typeof window === 'object') {
+      if (logDisabled_) {
+        return;
+      }
+      if (typeof console !== 'undefined' && typeof console.log === 'function') {
+        console.log.apply(console, arguments);
+      }
+    }
+  },
+
+  /**
+   * Shows a deprecation warning suggesting the modern and spec-compatible API.
+   */
+  deprecated: function(oldMethod, newMethod) {
+    if (!deprecationWarnings_) {
+      return;
+    }
+    console.warn(oldMethod + ' is deprecated, please use ' + newMethod +
+        ' instead.');
+  },
+
+  /**
+   * Extract browser version out of the provided user agent string.
+   *
+   * @param {!string} uastring userAgent string.
+   * @param {!string} expr Regular expression used as match criteria.
+   * @param {!number} pos position in the version string to be returned.
+   * @return {!number} browser version.
+   */
+  extractVersion: function(uastring, expr, pos) {
+    var match = uastring.match(expr);
+    return match && match.length >= pos && parseInt(match[pos], 10);
+  },
+
+  /**
+   * Browser detector.
+   *
+   * @return {object} result containing browser and version
+   *     properties.
+   */
+  detectBrowser: function(window) {
+    var navigator = window && window.navigator;
+
+    // Returned result object.
+    var result = {};
+    result.browser = null;
+    result.version = null;
+
+    // Fail early if it's not a browser
+    if (typeof window === 'undefined' || !window.navigator) {
+      result.browser = 'Not a browser.';
+      return result;
+    }
+
+    // Firefox.
+    if (navigator.mozGetUserMedia) {
+      result.browser = 'firefox';
+      result.version = this.extractVersion(navigator.userAgent,
+          /Firefox\/(\d+)\./, 1);
+    } else if (navigator.webkitGetUserMedia) {
+      // Chrome, Chromium, Webview, Opera, all use the chrome shim for now
+      if (window.webkitRTCPeerConnection) {
+        result.browser = 'chrome';
+        result.version = this.extractVersion(navigator.userAgent,
+          /Chrom(e|ium)\/(\d+)\./, 2);
+      } else { // Safari (in an unpublished version) or unknown webkit-based.
+        if (navigator.userAgent.match(/Version\/(\d+).(\d+)/)) {
+          result.browser = 'safari';
+          result.version = this.extractVersion(navigator.userAgent,
+            /AppleWebKit\/(\d+)\./, 1);
+        } else { // unknown webkit-based browser.
+          result.browser = 'Unsupported webkit-based browser ' +
+              'with GUM support but no WebRTC support.';
+          return result;
+        }
+      }
+    } else if (navigator.mediaDevices &&
+        navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) { // Edge.
+      result.browser = 'edge';
+      result.version = this.extractVersion(navigator.userAgent,
+          /Edge\/(\d+).(\d+)$/, 2);
+    } else if (navigator.mediaDevices &&
+        navigator.userAgent.match(/AppleWebKit\/(\d+)\./)) {
+        // Safari, with webkitGetUserMedia removed.
+      result.browser = 'safari';
+      result.version = this.extractVersion(navigator.userAgent,
+          /AppleWebKit\/(\d+)\./, 1);
+    } else { // Default fallthrough: not supported.
+      result.browser = 'Not a supported browser.';
+      return result;
+    }
+
+    return result;
+  },
+
+  // shimCreateObjectURL must be called before shimSourceObject to avoid loop.
+
+  shimCreateObjectURL: function(window) {
+    var URL = window && window.URL;
+
+    if (!(typeof window === 'object' && window.HTMLMediaElement &&
+          'srcObject' in window.HTMLMediaElement.prototype)) {
+      // Only shim CreateObjectURL using srcObject if srcObject exists.
+      return undefined;
+    }
+
+    var nativeCreateObjectURL = URL.createObjectURL.bind(URL);
+    var nativeRevokeObjectURL = URL.revokeObjectURL.bind(URL);
+    var streams = new Map(), newId = 0;
+
+    URL.createObjectURL = function(stream) {
+      if ('getTracks' in stream) {
+        var url = 'polyblob:' + (++newId);
+        streams.set(url, stream);
+        utils.deprecated('URL.createObjectURL(stream)',
+            'elem.srcObject = stream');
+        return url;
+      }
+      return nativeCreateObjectURL(stream);
+    };
+    URL.revokeObjectURL = function(url) {
+      nativeRevokeObjectURL(url);
+      streams.delete(url);
+    };
+
+    var dsc = Object.getOwnPropertyDescriptor(window.HTMLMediaElement.prototype,
+                                              'src');
+    Object.defineProperty(window.HTMLMediaElement.prototype, 'src', {
+      get: function() {
+        return dsc.get.apply(this);
+      },
+      set: function(url) {
+        this.srcObject = streams.get(url) || null;
+        return dsc.set.apply(this, [url]);
+      }
+    });
+
+    var nativeSetAttribute = window.HTMLMediaElement.prototype.setAttribute;
+    window.HTMLMediaElement.prototype.setAttribute = function() {
+      if (arguments.length === 2 &&
+          ('' + arguments[0]).toLowerCase() === 'src') {
+        this.srcObject = streams.get(arguments[1]) || null;
+      }
+      return nativeSetAttribute.apply(this, arguments);
+    };
+  }
+};
+
+// Export.
+module.exports = {
+  log: utils.log,
+  deprecated: utils.deprecated,
+  disableLog: utils.disableLog,
+  disableWarnings: utils.disableWarnings,
+  extractVersion: utils.extractVersion,
+  shimCreateObjectURL: utils.shimCreateObjectURL,
+  detectBrowser: utils.detectBrowser.bind(utils)
+};
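For illustration, the sketch below shows how extractVersion behaves for a typical Firefox user agent string; the UA string is made up but matches the pattern used by detectBrowser above, and the require path is an assumption.

```js
var utils = require('./src/js/utils'); // hypothetical path

var ua = 'Mozilla/5.0 (X11; Linux x86_64; rv:55.0) Gecko/20100101 Firefox/55.0';
utils.extractVersion(ua, /Firefox\/(\d+)\./, 1); // -> 55

// detectBrowser(window) applies the same regular expressions to
// window.navigator.userAgent and would report
// {browser: 'firefox', version: 55} for such a browser.
```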
diff --git a/test/.eslintrc b/test/.eslintrc
new file mode 100644
index 0000000..6450e68
--- /dev/null
+++ b/test/.eslintrc
@@ -0,0 +1,7 @@
+{
+    "env": {
+        "node": true,
+        "es6": true
+    },
+    "rules": {}
+}
diff --git a/test/README.md b/test/README.md
new file mode 100644
index 0000000..6970385
--- /dev/null
+++ b/test/README.md
@@ -0,0 +1,77 @@
+[![Build Status](https://travis-ci.org/webrtc/samples.svg)](https://travis-ci.org/webrtc/samples)
+
+# Intro #
+Selenium WebDriver, Node, Testling and travis-multirunner are used as the testing framework. Selenium WebDriver drives the browser; Node and Testling manage the tests, while travis-multirunner downloads and installs the browsers to test against, i.e. it creates the testing matrix.
+
+Functional unit tests located in `test/unit` are run in node using [Mocha](https://mochajs.org/), [Chai](http://chaijs.com/) and [Sinon](http://sinonjs.org/).
+
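As an illustration (not part of the upstream file), a unit test in that style is a plain Mocha spec using Chai's expect and Sinon for stubs; the spec below is a trivial, hypothetical placeholder rather than a real test from test/unit.

```js
/* eslint-env node, mocha */
'use strict';

const expect = require('chai').expect;
const sinon = require('sinon');

describe('a shim (placeholder)', () => {
  it('invokes its success callback once', () => {
    const onSuccess = sinon.stub();
    onSuccess('fake-stream'); // stand-in for the code under test
    expect(onSuccess.calledOnce).to.equal(true);
    expect(onSuccess.firstCall.args[0]).to.equal('fake-stream');
  });
});
```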
+## Development ##
+Detailed information on developing in the [webrtc](https://github.com/webrtc) GitHub repo can be found in the [WebRTC GitHub repo developer's guide](https://docs.google.com/document/d/1tn1t6LW2ffzGuYTK3366w1fhTkkzsSvHsBnOHoDfRzY/edit?pli=1#heading=h.e3366rrgmkdk).
+
+This guide assumes you are running a Debian based Linux distribution (travis-multirunner currently fetches .deb browser packages).
+
+#### Clone the repo in desired folder
+```bash
+git clone https://github.com/webrtc/adapter.git
+```
+
+#### Install npm dependencies
+```bash
+npm install
+```
+
+#### Build
+In order to get a usable file, you need to build it.
+```bash
+grunt build
+```
+This will result in 4 files in the out/ folder:
+* adapter.js - includes all the shims and is visible in the browser under the global `adapter` object (window.adapter).
+* adapter_no_edge.js - same as above but does not include the Microsoft Edge (ORTC) shim.
+* adapter_no_edge_no_global.js - same as above but is not exposed/visible in the browser (you cannot call/interact with the shims in the browser).
+* adapter_no_global.js - same as adapter.js but is not exposed/visible in the browser (you cannot call/interact with the shims in the browser).
+
+#### Run tests
+Runs grunt and the tests in test/tests.js. Change the browser to your choice; more details [here](#changeBrowser).
+```bash
+BROWSER=chrome BVER=stable npm test
+```
+
+#### Add tests
+test/tests.js is used as an index for the tests; new tests should be added there using `require()`.
+The tests themselves should be placed in the same js folder as main.js, e.g. `src/content/getusermedia/gum/js/test.js`.
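For example, registering a new test in the index is a one-line addition (the path below is hypothetical):

```js
// In test/tests.js:
require('./mytest/js/test.js');
```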
+
+The tests should be written using Testling for test validation (using the Tape API), while Selenium WebDriver is used to control and drive the test in the browser.
+
+Use the existing tests as a guide on how to write tests, and also look at the [Testling guide](https://ci.testling.com/guide/tape) and the [Selenium WebDriver documentation](http://www.seleniumhq.org/docs/03_webdriver.jsp) (make sure to select JavaScript as the language preference) for more information.
+
+Global Selenium WebDriver settings can be found in `test/selenium-lib.js`. If your test requires specific settings that selenium-lib.js does not cover, add your own to the test instead of importing selenium-lib.js, but only do this if it is really necessary.
+
+Once your test is ready, create a pull request and see how it runs on travis-multirunner.
+
+#### Change browser and channel/version for testing <a id="changeBrowser"></a>
+Chrome stable is currently installed as the default browser for the tests.
+
+Currently Chrome and Firefox are supported[*](#expBrowser); check the [travis-multirunner](https://github.com/DamonOehlman/travis-multirunner/blob/master/) repo for updates around this.
+Firefox channels supported are stable, beta and nightly.
+Chrome channels supported on Linux are stable, beta and unstable.
+
+To select a different browser and/or channel version, change the environment variables BROWSER and BVER, then rerun the tests with the new browser.
+```bash
+export BROWSER=firefox BVER=nightly
+```
+
+Alternatively you can also do it without changing environment variables.
+```bash
+BROWSER=firefox BVER=nightly npm test
+```
+
+###* Experimental browser support <a id="expBrowser"></a>
+You can run the tests locally in any installed browser that Selenium WebDriver supports, but you have to bypass travis-multirunner. Note that this only makes sense in a browser with WebRTC support.
+
+* Remove the `.setBinary()` and `.setChromeBinaryPath()` methods in `test/selenium-lib.js` (these currently point to travis-multirunner scripts that only run on Debian based Linux distributions) or change them to point to a location of your choice.
+* Then add the Selenium driver of the browser you want to use to `test/selenium-lib.js`, check Selenium WebDriver [supported browsers](http://www.seleniumhq.org/about/platforms.jsp#browsers) page for more details.
+* Then just do the following (replace "opera" with your browser of choice) in order to run all tests
+```bash
+BROWSER=opera npm test
+```
diff --git a/test/e2e/browserdetails.js b/test/e2e/browserdetails.js
new file mode 100644
index 0000000..194a5ef
--- /dev/null
+++ b/test/e2e/browserdetails.js
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('window.adapter', () => {
+  it('exists', () => {
+    expect(window).to.have.property('adapter');
+  });
+
+  describe('browserDetails', () => {
+    it('exists', () => {
+      expect(window.adapter).to.have.property('browserDetails');
+    });
+
+    it('detects a browser type', () => {
+      expect(window.adapter.browserDetails).to.have.property('browser');
+    });
+
+    it('detects a browser version', () => {
+      expect(window.adapter.browserDetails).to.have.property('version');
+    });
+  });
+});
diff --git a/test/e2e/connection.js b/test/e2e/connection.js
new file mode 100644
index 0000000..d18fa70
--- /dev/null
+++ b/test/e2e/connection.js
@@ -0,0 +1,288 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('establishes a connection', () => {
+  let pc1;
+  let pc2;
+  function noop() {};
+  function throwError(err) {
+    console.error(err.toString());
+    throw(err);
+  }
+  function addCandidate(pc, event) {
+    pc.addIceCandidate(event.candidate, noop, throwError);
+  };
+
+  function negotiate(pc1, pc2) {
+    return pc1.createOffer()
+    .then(function(offer) {
+      return pc1.setLocalDescription(offer);
+    }).then(function() {
+      return pc2.setRemoteDescription(pc1.localDescription);
+    }).then(function() {
+      return pc2.createAnswer();
+    }).then(function(answer) {
+      return pc2.setLocalDescription(answer);
+    }).then(function() {
+      return pc1.setRemoteDescription(pc2.localDescription);
+    })
+  }
+
+  beforeEach(() => {
+    pc1 = new RTCPeerConnection(null);
+    pc2 = new RTCPeerConnection(null);
+
+    pc1.onicecandidate = function(event) {
+      addCandidate(pc2, event);
+    };
+    pc2.onicecandidate = function(event) {
+      addCandidate(pc1, event);
+    };
+  });
+  afterEach(() => {
+    pc1.close();
+    pc2.close();
+  });
+
+  it('with legacy callbacks', (done) => {
+    pc1.oniceconnectionstatechange = function() {
+      if (pc1.iceConnectionState === 'connected' ||
+          pc1.iceConnectionState === 'completed') {
+        done();
+      }
+    };
+
+    var constraints = {video: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      pc1.addStream(stream);
+
+      pc1.createOffer(
+        function(offer) {
+          pc1.setLocalDescription(offer,
+            function() {
+              pc2.setRemoteDescription(offer,
+                function() {
+                  pc2.createAnswer(
+                    function(answer) {
+                      pc2.setLocalDescription(answer,
+                        function() {
+                          pc1.setRemoteDescription(answer, noop, throwError);
+                        },
+                        throwError
+                      );
+                    },
+                    throwError
+                  );
+                },
+                throwError
+              );
+            },
+            throwError
+          );
+        },
+        throwError
+      );
+    });
+  });
+
+  it('with promises', (done) => {
+    pc1.oniceconnectionstatechange = function() {
+      if (pc1.iceConnectionState === 'connected' ||
+          pc1.iceConnectionState === 'completed') {
+        done();
+      }
+    };
+
+    var constraints = {video: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      pc1.addStream(stream);
+      return negotiate(pc1, pc2);
+    })
+    .catch(throwError);
+  });
+
+  it('with streams in both directions', (done) => {
+    pc1.oniceconnectionstatechange = function() {
+      if (pc1.iceConnectionState === 'connected' ||
+          pc1.iceConnectionState === 'completed') {
+        done();
+      }
+    };
+
+    var constraints = {video: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      pc1.addStream(stream);
+      pc2.addStream(stream);
+      return negotiate(pc1, pc2);
+    })
+    .catch(throwError);
+  });
+
+  describe('with addTrack', () => {
+    it('and all tracks of a stream', (done) => {
+      pc1.oniceconnectionstatechange = function() {
+        if (pc1.iceConnectionState === 'connected' ||
+            pc1.iceConnectionState === 'completed') {
+          done();
+        }
+      };
+
+      pc2.onaddstream = function(event) {
+        expect(event).to.have.property('stream');
+        expect(event.stream.getAudioTracks()).to.have.length(1);
+        expect(event.stream.getVideoTracks()).to.have.length(1);
+      };
+
+      var constraints = {audio: true, video: true};
+      navigator.mediaDevices.getUserMedia(constraints)
+      .then(function(stream) {
+        stream.getTracks().forEach(function(track) {
+          pc1.addTrack(track, stream);
+        });
+        return negotiate(pc1, pc2);
+      })
+      .catch(throwError);
+    });
+
+    it('but only the audio track of an av stream', (done) => {
+      pc1.oniceconnectionstatechange = function() {
+        if (pc1.iceConnectionState === 'connected' ||
+            pc1.iceConnectionState === 'completed') {
+          done();
+        }
+      };
+
+      pc2.onaddstream = function(event) {
+        expect(event).to.have.property('stream');
+        expect(event.stream.getAudioTracks()).to.have.length(1);
+        expect(event.stream.getVideoTracks()).to.have.length(0);
+      };
+
+      var constraints = {audio: true, video: true};
+      navigator.mediaDevices.getUserMedia(constraints)
+      .then(function(stream) {
+        stream.getAudioTracks().forEach(function(track) {
+          pc1.addTrack(track, stream);
+        });
+        return negotiate(pc1, pc2);
+      })
+      .catch(throwError);
+    });
+
+    it('as two streams', (done) => {
+      let streams = [];
+      pc1.oniceconnectionstatechange = function() {
+        if (pc1.iceConnectionState === 'connected' ||
+            pc1.iceConnectionState === 'completed') {
+          expect(streams).to.have.length(2);
+          done();
+        }
+      };
+
+      pc2.onaddstream = function(event) {
+        expect(event).to.have.property('stream');
+        expect(event.stream.getTracks()).to.have.length(1);
+        streams.push(event.stream);
+      };
+
+      var constraints = {audio: true, video: true};
+      navigator.mediaDevices.getUserMedia(constraints)
+      .then(function(stream) {
+        var audioStream = new MediaStream(stream.getAudioTracks());
+        var videoStream = new MediaStream(stream.getVideoTracks());
+        audioStream.getTracks().forEach(function(track) {
+          pc1.addTrack(track, audioStream);
+        });
+        videoStream.getTracks().forEach(function(track) {
+          pc1.addTrack(track, videoStream);
+        });
+        return negotiate(pc1, pc2);
+      })
+      .catch(throwError);
+    });
+  });
+
+  it('with no explicit end-of-candidates', function(done) {
+    if (window.adapter.browserDetails.browser === 'edge') {
+      this.timeout(10000);
+    }
+    pc1.oniceconnectionstatechange = function() {
+      if (pc1.iceConnectionState === 'connected' ||
+          pc1.iceConnectionState === 'completed') {
+        done();
+      }
+    };
+
+    pc1.onicecandidate = (event) => {
+      if (event.candidate) {
+        pc2.addIceCandidate(event.candidate, noop, throwError);
+      }
+    };
+    pc2.onicecandidate = (event) => {
+      if (event.candidate) {
+        pc1.addIceCandidate(event.candidate, noop, throwError);
+      }
+    };
+
+    var constraints = {video: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      stream.getTracks().forEach(function(track) {
+        pc1.addTrack(track, stream);
+      });
+      return negotiate(pc1, pc2);
+    })
+    .catch(throwError);
+  });
+
+  describe('with datachannel', function() {
+    beforeEach(function() {
+      if (window.adapter.browserDetails.browser === 'edge') {
+        this.skip();
+      }
+    });
+
+    it('establishes a connection', (done) => {
+      pc1.oniceconnectionstatechange = function() {
+        if (pc1.iceConnectionState === 'connected' ||
+            pc1.iceConnectionState === 'completed') {
+          done();
+        }
+      };
+
+      pc1.createDataChannel('foo');
+      negotiate(pc1, pc2)
+      .catch(throwError);
+    });
+  });
+
+  it('and calls the video loadedmetadata', (done) => {
+    pc2.addEventListener('addstream', function(e) {
+      var v = document.createElement('video');
+      v.autoplay = true;
+      v.addEventListener('loadedmetadata', function() {
+        done();
+      });
+      v.srcObject = e.stream;
+    });
+    var constraints = {video: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      stream.getTracks().forEach(function(track) {
+        pc1.addTrack(track, stream);
+      });
+      return negotiate(pc1, pc2);
+    })
+    .catch(throwError);
+  });
+});
diff --git a/test/e2e/getusermedia.js b/test/e2e/getusermedia.js
new file mode 100644
index 0000000..15caff0
--- /dev/null
+++ b/test/e2e/getusermedia.js
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('getUserMedia', () => {
+  describe('navigator.getUserMedia', () => {
+    it('exists', () => {
+      expect(navigator).to.have.property('getUserMedia');
+    });
+
+    it('calls the callback', (done) => {
+      navigator.getUserMedia({video: true}, (stream) => {
+        expect(stream.getTracks()).to.have.length(1);
+        done();
+      }, (err) => {
+        throw(err);
+      });
+    });
+  });
+
+  describe('navigator.mediaDevices.getUserMedia', () => {
+    it('exists', () => {
+      expect(navigator.mediaDevices).to.have.property('getUserMedia');
+    });
+
+    it('fulfills the promise', (done) => {
+      navigator.mediaDevices.getUserMedia({video: true})
+      .then((stream) => {
+        expect(stream.getTracks()).to.have.length(1);
+        done();
+      }, (err) => {
+        throw(err);
+      });
+    });
+  });
+});
diff --git a/test/e2e/mediastream.js b/test/e2e/mediastream.js
new file mode 100644
index 0000000..947f294
--- /dev/null
+++ b/test/e2e/mediastream.js
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('MediaStream', () => {
+  it('window.MediaStream exists', () => {
+    expect(window).to.have.property('MediaStream');
+  });
+});
diff --git a/test/e2e/ontrack.js b/test/e2e/ontrack.js
new file mode 100644
index 0000000..9bd24e3
--- /dev/null
+++ b/test/e2e/ontrack.js
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('track event', () => {
+  let pc;
+  beforeEach(() => {
+    pc = new RTCPeerConnection();
+  });
+
+  const sdp = 'v=0\r\n' +
+      'o=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+      's=-\r\n' +
+      't=0 0\r\n' +
+      'a=msid-semantic:WMS *\r\n' +
+      'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+      'c=IN IP4 0.0.0.0\r\n' +
+      'a=rtcp:9 IN IP4 0.0.0.0\r\n' +
+      'a=ice-ufrag:someufrag\r\n' +
+      'a=ice-pwd:somelongpwdwithenoughrandomness\r\n' +
+      'a=fingerprint:sha-256 8C:71:B3:8D:A5:38:FD:8F:A4:2E:A2:65:6C:86:52' +
+      ':BC:E0:6E:94:F2:9F:7C:4D:B5:DF:AF:AA:6F:44:90:8D:F4\r\n' +
+      'a=setup:actpass\r\n' +
+      'a=rtcp-mux\r\n' +
+      'a=mid:mid1\r\n' +
+      'a=sendonly\r\n' +
+      'a=rtpmap:111 opus/48000/2\r\n' +
+      'a=msid:stream1 track1\r\n' +
+      'a=ssrc:1001 cname:some\r\n';
+
+  it('RTCPeerConnection.prototype.ontrack exists', () => {
+    expect('ontrack' in RTCPeerConnection.prototype).to.equal(true);
+  });
+
+  describe('is called by setRemoteDescription', () => {
+    it.skip('track event', (done) => {
+      pc.addEventListener('track', () => {
+        done();
+      });
+      pc.setRemoteDescription({type: 'offer', sdp})
+    });
+
+    it('ontrack', (done) => {
+      pc.ontrack = () => {
+        done();
+      };
+      pc.setRemoteDescription({type: 'offer', sdp})
+    });
+  });
+
+  describe('the event has', () => {
+    it('a track', (done) => {
+      pc.ontrack = (e) => {
+        expect(e).to.have.property('track');
+        done();
+      };
+      pc.setRemoteDescription({type: 'offer', sdp});
+    });
+
+    it('a set of streams', (done) => {
+      pc.ontrack = (e) => {
+        expect(e).to.have.property('streams');
+        expect(e.streams).to.be.an('array');
+        done();
+      };
+      pc.setRemoteDescription({type: 'offer', sdp});
+    });
+
+    it('a receiver that is contained in the set of receivers', (done) => {
+      pc.ontrack = (e) => {
+        expect(e).to.have.property('receiver');
+        expect(pc.getReceivers()).to.contain(e.receiver);
+        done();
+      };
+      pc.setRemoteDescription({type: 'offer', sdp})
+    });
+  });
+});
diff --git a/test/e2e/rtcicecandidate.js b/test/e2e/rtcicecandidate.js
new file mode 100644
index 0000000..d999447
--- /dev/null
+++ b/test/e2e/rtcicecandidate.js
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('RTCIceCandidate', () => {
+  it('window.RTCIceCandidate exists', () => {
+    expect(window).to.have.property('RTCIceCandidate');
+  });
+});
diff --git a/test/e2e/rtcpeerconnection.js b/test/e2e/rtcpeerconnection.js
new file mode 100644
index 0000000..53fcaa1
--- /dev/null
+++ b/test/e2e/rtcpeerconnection.js
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('RTCPeerConnection', () => {
+  it('window.RTCPeerConnection exists', () => {
+    expect(window).to.have.property('RTCPeerConnection');
+  });
+
+  it('constructor works', () => {
+    const constructor = () => {
+      return new RTCPeerConnection();
+    };
+    expect(constructor).not.to.throw();
+  });
+
+  describe('getSenders', () => {
+    it('exists', () => {
+      expect(RTCPeerConnection.prototype).to.have.property('getSenders');
+    });
+  });
+});
diff --git a/test/e2e/rtcsessiondescription.js b/test/e2e/rtcsessiondescription.js
new file mode 100644
index 0000000..f0ee6b3
--- /dev/null
+++ b/test/e2e/rtcsessiondescription.js
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('RTCSessionDescription', () => {
+  it('window.RTCSessionDescription exists', () => {
+    expect(window).to.have.property('RTCSessionDescription');
+  });
+});
diff --git a/test/e2e/srcobject.js b/test/e2e/srcobject.js
new file mode 100644
index 0000000..8c1a157
--- /dev/null
+++ b/test/e2e/srcobject.js
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+describe('srcObject', () => {
+  ['audio', 'video'].forEach((mediaType) => {
+    describe('setter', () => {
+      it('triggers loadedmetadata (' + mediaType + ')', (done) => {
+        let constraints = {};
+        constraints[mediaType] = true;
+        navigator.mediaDevices.getUserMedia(constraints)
+        .then((stream) => {
+          const mediaElement = document.createElement(mediaType);
+          mediaElement.setAttribute('autoplay', 'true');
+          // If the srcObject shim works, we should get media 
+          // at some point. This will trigger loadedmetadata.
+          mediaElement.addEventListener('loadedmetadata', function() {
+            done();
+          });
+          mediaElement.srcObject = stream;
+        });
+      });
+    });
+
+    describe('getter', () => {
+      it('returns the stream (' + mediaType + ')', (done) => {
+        let constraints = {};
+        constraints[mediaType] = true;
+        navigator.mediaDevices.getUserMedia(constraints)
+        .then((stream) => {
+          const mediaElement = document.createElement(mediaType);
+          mediaElement.setAttribute('autoplay', 'true');
+          mediaElement.setAttribute('id', mediaType);
+          mediaElement.srcObject = stream;
+          expect(mediaElement.srcObject).to.have.property('id');
+          expect(mediaElement.srcObject.id).to.equal(stream.id);
+          done();
+        });
+      });
+    });
+  });
+});
diff --git a/test/getusermedia-mocha.js b/test/getusermedia-mocha.js
new file mode 100644
index 0000000..2afda65
--- /dev/null
+++ b/test/getusermedia-mocha.js
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+/* global beforeEach, afterEach */
+'use strict';
+
+/* wrap navigator.getUserMedia and navigator.mediaDevices.getUserMedia
+ * so that any streams acquired are released after each test.
+ */
+beforeEach(() => {
+  let streams = [];
+  let release = () => {
+    streams.forEach((stream) => {
+      stream.getTracks().forEach((track) => {
+        track.stop();
+      });
+    });
+    streams = [];
+  };
+
+  let origGetUserMedia = navigator.getUserMedia.bind(navigator);
+  navigator.getUserMedia = (constraints, cb, eb) => {
+    origGetUserMedia(constraints, (stream) => {
+      streams.push(stream);
+      if (cb) {
+        cb.apply(null, [stream]);
+      }
+    }, eb);
+  };
+  navigator.getUserMedia.restore = () => {
+    navigator.getUserMedia = origGetUserMedia;
+    release();
+  };
+
+  let origMediaDevicesGetUserMedia =
+      navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);
+  navigator.mediaDevices.getUserMedia = (constraints) => {
+    return origMediaDevicesGetUserMedia(constraints).then((stream) => {
+      streams.push(stream);
+      return stream;
+    });
+  };
+  navigator.mediaDevices.getUserMedia.restore = () => {
+    navigator.mediaDevices.getUserMedia = origMediaDevicesGetUserMedia;
+    release();
+  };
+});
+
+afterEach(() => {
+  navigator.getUserMedia.restore();
+  navigator.mediaDevices.getUserMedia.restore();
+});
diff --git a/test/karma.conf.js b/test/karma.conf.js
new file mode 100644
index 0000000..63089f6
--- /dev/null
+++ b/test/karma.conf.js
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+const os = require('os');
+
+let browsers;
+if (process.env.BROWSER) {
+  if (process.env.BROWSER === 'MicrosoftEdge') {
+    browsers = ['Edge'];
+  } else if (process.env.BROWSER === 'safari') {
+    browsers = ['Safari'];
+  } else {
+    browsers = [process.env.BROWSER];
+  }
+} else if (os.platform() === 'darwin') {
+  browsers = ['chrome', 'firefox', 'Safari'];
+} else if (os.platform() === 'win32') {
+  browsers = ['chrome', 'firefox', 'Edge'];
+} else {
+  browsers = ['chrome', 'firefox'];
+}
+
+let chromeFlags = [
+  '--use-fake-device-for-media-stream',
+  '--use-fake-ui-for-media-stream',
+  '--headless', '--disable-gpu', '--remote-debugging-port=9222'
+];
+if (process.env.CHROMEEXPERIMENT !== 'false') {
+  chromeFlags.push('--enable-experimental-web-platform-features');
+}
+
+module.exports = function(config) {
+  config.set({
+    basePath: '..',
+    frameworks: ['browserify', 'mocha', 'chai'],
+    files: [
+      'src/js/adapter_core.js',
+      'test/getusermedia-mocha.js',
+      'test/e2e/*.js',
+    ],
+    exclude: [],
+    preprocessors: {
+      'src/js/adapter_core.js': ['browserify']
+    },
+    reporters: ['mocha'],
+    port: 9876,
+    colors: true,
+    logLevel: config.LOG_INFO,
+    autoWatch: false,
+    customLaunchers: {
+      chrome: {
+        base: 'Chrome',
+        flags: chromeFlags
+      },
+      firefox: {
+        base: 'Firefox',
+        prefs: {
+          'media.navigator.streams.fake': true,
+          'media.navigator.permission.disabled': true
+        }
+      }
+    },
+    singleRun: true,
+    concurrency: Infinity,
+    browsers,
+    browserify: {
+      debug: true,
+      transform: ['brfs'],
+      standalone: 'adapter',
+    },
+  });
+};
diff --git a/test/run-tests.js b/test/run-tests.js
new file mode 100644
index 0000000..4f80124
--- /dev/null
+++ b/test/run-tests.js
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+
+'use strict';
+var fs = require('fs');
+var os = require('os');
+var test = require('tape');
+
+if (!process.env.BROWSER) {
+  process.env.BROWSER = 'chrome';
+}
+if (!process.env.BVER) {
+  process.env.BVER = 'stable';
+}
+var browserbin = './browsers/bin/' + process.env.BROWSER +
+    '-' + process.env.BVER;
+
+// install browsers via travis-multirunner (on Linux).
+if (os.platform() === 'linux' &&
+    process.env.BROWSER !== 'MicrosoftEdge') {
+  try {
+    fs.accessSync(browserbin, fs.X_OK);
+  } catch (e) {
+    if (e.code === 'ENOENT') {
+      // execute travis-multirunner setup to install browser
+      require('child_process').execSync(
+          './node_modules/travis-multirunner/setup.sh');
+    }
+  }
+}
+if (os.platform() === 'win32') {
+  if (process.env.BROWSER === 'MicrosoftEdge') {
+    // assume MicrosoftWebDriver is installed.
+    process.env.PATH += ';C:\\Program Files (x86)\\Microsoft Web Driver\\';
+  }
+  if (process.env.BROWSER === 'chrome') {
+    // for some reason chromedriver doesn't like the one in node_modules\.bin
+    process.env.PATH += ';' + process.cwd() +
+      '\\node_modules\\chromedriver\\lib\\chromedriver\\';
+  }
+}
+
+// Add all test files here with a short comment.
+
+// Checks that the tests can start and that execution finishes.
+require('./test');
+
+// This is run as a test so it is executed after all tests
+// have completed.
+test('Shutdown', function(t) {
+  var driver = require('./selenium-lib').buildDriver();
+  driver.close()
+  .then(function() {
+    driver.quit().then(function() {
+      t.end();
+    });
+  })
+  .catch(function(err) {
+    // Edge doesn't like close->quit
+    console.log(err.name);
+    if (process.env.BROWSER === 'MicrosoftEdge') {
+      t.end();
+    }
+  });
+});
diff --git a/test/selenium-lib.js b/test/selenium-lib.js
new file mode 100644
index 0000000..6053135
--- /dev/null
+++ b/test/selenium-lib.js
@@ -0,0 +1,154 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+'use strict';
+
+// https://code.google.com/p/selenium/wiki/WebDriverJs
+var webdriver = require('selenium-webdriver');
+var chrome = require('selenium-webdriver/chrome');
+var firefox = require('selenium-webdriver/firefox');
+var edge = require('selenium-webdriver/edge');
+var safari = require('selenium-webdriver/safari');
+var fs = require('fs');
+var os = require('os');
+
+var sharedDriver = null;
+
+function getBrowserVersion() {
+  var browser = process.env.BROWSER;
+  var browserChannel = process.env.BVER;
+
+  // Browser reg expressions and position to look for the milestone version.
+  var chromeExp = /\/chrome\/(\d+)\./;
+  var firefoxExp = /\/firefox\/(\d+)\./;
+
+  var browserVersion = function(expr) {
+    var symlink = './browsers/bin/' + browser + '-' + browserChannel;
+    var pathToBrowser = fs.readlinkSync(symlink);
+    var match = pathToBrowser.match(expr);
+    return match && match.length >= 1 && parseInt(match[1], 10);
+  };
+
+  switch (browser) {
+    case 'chrome':
+      return browserVersion(chromeExp);
+    case 'firefox':
+      return browserVersion(firefoxExp);
+    case 'safari':
+      return browserChannel;
+    default:
+      return 'non supported browser.';
+  }
+}
+
+function buildDriver() {
+  if (sharedDriver) {
+    return sharedDriver;
+  }
+  // Firefox options.
+  // http://selenium.googlecode.com/git/docs/api/javascript/module_selenium-webdriver_firefox.html
+  var profile = new firefox.Profile();
+  profile.setPreference('media.navigator.streams.fake', true);
+  // This enables device labels for enumerateDevices when using fake devices.
+  profile.setPreference('media.navigator.permission.disabled', true);
+  // Currently the FF webdriver extension is not signed and FF 41 no longer
+  // allows unsigned extensions by default.
+  // TODO: Remove this once FF no longer allow turning this off and the
+  // selenium team starts making a signed FF webdriver extension.
+  // https://github.com/SeleniumHQ/selenium/issues/901.
+  profile.setPreference('xpinstall.signatures.required', false);
+
+  var firefoxOptions = new firefox.Options()
+      .setProfile(profile);
+  if (os.platform() === 'linux') {
+    firefoxOptions.setBinary('node_modules/.bin/start-firefox');
+  }
+
+  // Chrome options.
+  // http://selenium.googlecode.com/git/docs/api/javascript/module_selenium-webdriver_chrome_class_Options.html#addArguments
+  var chromeOptions = new chrome.Options()
+      .addArguments('allow-file-access-from-files')
+      .addArguments('use-fake-device-for-media-stream')
+      .addArguments('use-fake-ui-for-media-stream');
+  if (os.platform() === 'linux') {
+    chromeOptions.setChromeBinaryPath('node_modules/.bin/start-chrome');
+  }
+
+  if (process.env.BROWSER === 'chrome') {
+    let browserVersion = getBrowserVersion();
+    if (browserVersion >= 49 && process.env.CHROMEEXPERIMENT !== 'false') {
+      chromeOptions.addArguments('--enable-experimental-web-platform-features');
+    }
+    if (browserVersion >= 59) {
+      chromeOptions.addArguments('headless');
+      chromeOptions.addArguments('disable-gpu');
+    }
+  }
+
+  var edgeOptions = new edge.Options();
+
+  var safariOptions = new safari.Options();
+  safariOptions.setTechnologyPreview(process.env.BVER === 'TechnologyPreview');
+
+  sharedDriver = new webdriver.Builder()
+      .forBrowser(process.env.BROWSER)
+      .setFirefoxOptions(firefoxOptions)
+      .setChromeOptions(chromeOptions)
+      .setSafariOptions(safariOptions)
+      .setEdgeOptions(edgeOptions);
+
+  if (process.env.BROWSER === 'MicrosoftEdge') {
+    if (process.env.SELENIUM_SERVER) {
+      sharedDriver.usingServer(process.env.SELENIUM_SERVER);
+    } else if (os.platform() !== 'win32') {
+      throw new Error('MicrosoftEdge is only supported on Windows or via ' +
+          'a selenium server');
+    }
+  } else if (process.env.BROWSER === 'firefox') {
+    let browserVersion = getBrowserVersion();
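+    // Use the Marionette-based driver for Firefox 47 and later instead of
+    // the legacy webdriver extension.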
+    if (browserVersion >= 47) {
+      sharedDriver.getCapabilities().set('marionette', true);
+    }
+  }
+
+  sharedDriver = sharedDriver.build();
+
+  // Set global executeAsyncScript() timeout (default is 0) to allow async
+  // callbacks to be caught in tests.
+  sharedDriver.manage().timeouts().setScriptTimeout(10 * 1000);
+
+  return sharedDriver;
+}
+
+// loads the dummy page that includes adapter.js.
+// In Microsoft Edge (via selenium) this directly injects adapter.js.
+function loadTestPage(driver) {
+  if (process.env.BROWSER === 'MicrosoftEdge') {
+    return driver.get('about:blank').then(function() {
+      return driver.executeScript(fs.readFileSync('out/adapter.js').toString());
+    });
+  }
+  return driver.get('file://' + process.cwd() + '/test/testpage.html');
+}
+
+// A helper function to query stats from a PeerConnection.
+function getStats(driver, peerConnection) {
+  // Execute getStats on peerconnection named `peerConnection`.
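+  // `peerConnection` is a JavaScript expression evaluated on the test page
+  // (e.g. 'pc1'); the serialized stats report is passed back to the test
+  // through the async callback.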
+  return driver.executeAsyncScript(
+      'var callback = arguments[arguments.length - 1];' +
+      peerConnection + '.getStats(null).then(function(report) {' +
+      '  callback(report);' +
+      '});');
+}
+
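+// Typical usage from a tape test (see test/test.js):
+//   var driver = seleniumHelpers.buildDriver();
+//   seleniumHelpers.loadTestPage(driver)
+//   .then(function() {
+//     return driver.executeAsyncScript(testDefinition);
+//   });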
+module.exports = {
+  buildDriver,
+  loadTestPage,
+  getBrowserVersion,
+  getStats
+};
diff --git a/test/test.js b/test/test.js
new file mode 100644
index 0000000..ec10a1c
--- /dev/null
+++ b/test/test.js
@@ -0,0 +1,1343 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+ /* eslint-env node */
+
+'use strict';
+
+// This is a basic test file for use with testling and webdriver.
+// The tests are written with tape.
+
+var test = require('tape');
+var webdriver = require('selenium-webdriver');
+var seleniumHelpers = require('./selenium-lib');
+
+// Start of tests.
+// Test that adding and removing an event listener on navigator.mediaDevices
+// is possible. The use case for this is the devicechange event.
+// This does not test whether devicechange is actually fired.
+test('navigator.mediaDevices eventlisteners', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(3);
+    t.pass('Page loaded');
+    return driver.executeScript(
+      'return typeof(navigator.mediaDevices.addEventListener) === ' +
+          '\'function\'');
+  })
+  .then(function(isAddEventListenerFunction) {
+    t.ok(isAddEventListenerFunction,
+        'navigator.mediaDevices.addEventListener is a function');
+    return driver.executeScript(
+    'return typeof(navigator.mediaDevices.removeEventListener) === ' +
+         '\'function\'');
+  })
+  .then(function(isRemoveEventListenerFunction) {
+    t.ok(isRemoveEventListenerFunction,
+      'navigator.mediaDevices.removeEventListener is a function');
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('createObjectURL shim test', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Define test.
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    ['audio', 'video'].reduce(function(p, type) {
+      return p.then(function() {
+        var constraints = {fake: true};
+        constraints[type] = true;
+        return navigator.mediaDevices.getUserMedia(constraints);
+      })
+      .then(function(stream) {
+        var element = document.createElement(type);
+        window[type] = element;
+        window[type + 'Stream'] = stream;
+        element.id = type;
+        element.autoplay = true;
+        // Test both ways of setting src
+        if (type === 'audio') {
+          element.src = URL.createObjectURL(stream);
+        } else {
+          element.setAttribute('src', URL.createObjectURL(stream));
+        }
+        return new Promise(function(resolve) {
+          element.addEventListener('loadedmetadata', resolve);
+        });
+      });
+    }, Promise.resolve())
+    .then(function() {
+      document.body.appendChild(window.audio);
+      document.body.appendChild(window.video);
+      callback(null);
+    })
+    .catch(function(err) {
+      callback(err.name);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(5);
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var gumResult = error ? 'error: ' + error : 'no errors';
+    t.ok(!error, 'getUserMedia result:  ' + gumResult);
+    // Wait until the loadedmetadata event has fired and the video element
+    // has been appended.
+    return driver.wait(webdriver.until.elementLocated(
+      webdriver.By.id('video')), 3000);
+  })
+  .then(function() {
+    return Promise.all([
+      'return document.getElementById("audio").srcObject.id',
+      'return window.audioStream.id',
+      'return document.getElementById("video").srcObject.id',
+      'return window.videoStream.id'
+    ].map(function(script) {
+      return driver.executeScript(script);
+    }))
+    .then(function(ids) {
+      t.ok(ids[0] === ids[1], 'audio srcObject getter returns audio stream');
+      t.ok(ids[2] === ids[3], 'video srcObject getter returns video stream');
+      t.ok(ids[0] !== ids[2], 'audio and video streams are different');
+      t.end();
+    });
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('srcObject set from another object', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Define test.
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var constraints = {video: true, fake: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      window.stream = stream;
+
+      var video = document.createElement('video');
+      var video2 = document.createElement('video');
+      video.setAttribute('id', 'video');
+      video.setAttribute('autoplay', 'true');
+      video2.setAttribute('id', 'video2');
+      video2.setAttribute('autoplay', 'true');
+      video.srcObject = stream;
+      video2.srcObject = video.srcObject;
+
+      // If the srcObject shim works, we should get a video
+      // at some point. This will trigger loadedmetadata.
+      video.addEventListener('loadedmetadata', function() {
+        document.body.appendChild(video);
+        document.body.appendChild(video2);
+        callback(null);
+      });
+    })
+    .catch(function(err) {
+      callback(err.name);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(3);
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var gumResult = (error) ? 'error: ' + error : 'no errors';
+    t.ok(!error, 'getUserMedia result:  ' + gumResult);
+    // Wait until the loadedmetadata event has fired and the video element
+    // has been appended.
+    // 3 second timeout in case the event does not fire for some reason.
+    return driver.wait(webdriver.until.elementLocated(
+      webdriver.By.id('video2')), 3000);
+  })
+  .then(function() {
+    return driver.executeScript(
+        'return document.getElementById(\'video\').srcObject.id')
+    .then(function(srcObjectId) {
+      return driver.executeScript(
+        'return document.getElementById(\'video2\').srcObject.id')
+      .then(function(srcObjectId2) {
+        t.ok(srcObjectId === srcObjectId2,
+            'Stream ids from srcObjects match.');
+      });
+    });
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('srcObject null setter', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Define test.
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var constraints = {video: true, fake: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      window.stream = stream;
+
+      var video = document.createElement('video');
+      video.setAttribute('id', 'video');
+      video.setAttribute('autoplay', 'true');
+      document.body.appendChild(video);
+      video.srcObject = stream;
+      video.srcObject = null;
+
+      callback(null);
+    })
+    .catch(function(err) {
+      callback(err.name);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(3);
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var gumResult = (error) ? 'error: ' + error : 'no errors';
+    t.ok(!error, 'getUserMedia result:  ' + gumResult);
+    // Wait until the loadedmetadata event has fired and the video element
+    // has been appended.
+    // 3 second timeout in case the event does not fire for some reason.
+    return driver.wait(webdriver.until.elementLocated(
+      webdriver.By.id('video')), 3000);
+  })
+  .then(function() {
+    return driver.executeScript(
+        'return document.getElementById(\'video\').src');
+  })
+  .then(function(src) {
+    t.ok(src === 'file://' + process.cwd() + '/test/testpage.html' ||
+        // Some browsers report the page url here instead of an empty string.
+        src === '', 'src is empty (or the page url) after srcObject = null');
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('Attach mediaStream directly', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Define test.
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var constraints = {video: true, fake: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      window.stream = stream;
+
+      var video = document.createElement('video');
+      video.setAttribute('id', 'video');
+      video.setAttribute('autoplay', 'true');
+      // If the srcObject shim works, we should get a video
+      // at some point. This will trigger loadedmetadata.
+      // Firefox < 38 had issues with this; the workaround was removed
+      // now that 38 is stable.
+      video.addEventListener('loadedmetadata', function() {
+        document.body.appendChild(video);
+      });
+
+      video.srcObject = stream;
+      callback(null);
+    })
+    .catch(function(err) {
+      callback(err.name);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(4);
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var gumResult = (error) ? 'error: ' + error : 'no errors';
+    t.ok(!error, 'getUserMedia result:  ' + gumResult);
+    // We need to wait because the stream can take a while to be set up.
+    driver.wait(function() {
+      return driver.executeScript(
+        'return typeof window.stream !== \'undefined\'');
+    }, 3000);
+    return driver.executeScript(
+      // Firefox and Chrome have different constructor names.
+      'return window.stream.constructor.name.match(\'MediaStream\') !== null');
+  })
+  .then(function(isMediaStream) {
+    t.ok(isMediaStream, 'Stream is a MediaStream');
+    // Wait until the loadedmetadata event has fired and the video element
+    // has been appended.
+    // 3 second timeout in case the event does not fire for some reason.
+    return driver.wait(webdriver.until.elementLocated(
+      webdriver.By.id('video')), 3000);
+  })
+  .then(function() {
+    return driver.wait(function() {
+      return driver.executeScript(
+          'return document.getElementById("video").readyState === 4');
+    }, 3000);
+  })
+  .then(function() {
+    t.pass('Stream successfully attached directly to a video element');
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('Re-attaching mediaStream directly', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Define test.
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var constraints = {video: true, fake: true};
+    navigator.mediaDevices.getUserMedia(constraints)
+    .then(function(stream) {
+      window.stream = stream;
+
+      var video = document.createElement('video');
+      var video2 = document.createElement('video');
+      video.setAttribute('id', 'video');
+      video.setAttribute('autoplay', 'true');
+      video2.setAttribute('id', 'video2');
+      video2.setAttribute('autoplay', 'true');
+      // If the srcObject shim works, we should get a video
+      // at some point. This will trigger loadedmetadata.
+      // This reattaches to the second video which will trigger
+      // loadedmetadata there.
+      video.addEventListener('loadedmetadata', function() {
+        document.body.appendChild(video);
+        video2.srcObject = video.srcObject;
+      });
+      video2.addEventListener('loadedmetadata', function() {
+        document.body.appendChild(video2);
+      });
+
+      video.srcObject = stream;
+      callback(null);
+    })
+    .catch(function(err) {
+      callback(err.name);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(5);
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var gumResult = (error) ? 'error: ' + error : 'no errors';
+    t.ok(!error, 'getUserMedia result:  ' + gumResult);
+    // We need to wait because the stream can take a while to be set up.
+    return driver.wait(function() {
+      return driver.executeScript(
+        'return typeof window.stream !== \'undefined\'');
+    }, 3000)
+    .then(function() {
+      return driver.executeScript(
+      // Firefox and Chrome have different constructor names.
+      'return window.stream.constructor.name.match(\'MediaStream\') !== null');
+    });
+  })
+  .then(function(isMediaStream) {
+    t.ok(isMediaStream, 'Stream is a MediaStream');
+    // Wait until the loadedmetadata event has fired and the video element
+    // has been appended.
+    // 3 second timeout in case the event does not fire for some reason.
+    return driver.wait(webdriver.until.elementLocated(
+      webdriver.By.id('video')), 3000);
+  })
+  .then(function(videoElement) {
+    return driver.wait(function() {
+      return driver.executeScript(
+          'return document.querySelector("video").readyState === 4');
+    }, 3000);
+  })
+  .then(function() {
+    t.pass('Stream successfully attached directly to a video element');
+    // Wait until the loadedmetadata event has fired and the video element
+    // has been appended.
+    // 3 second timeout in case the event does not fire for some reason.
+    return driver.wait(webdriver.until.elementLocated(
+      webdriver.By.id('video2')), 3000);
+  })
+  .then(function() {
+    return driver.wait(function() {
+      return driver.executeScript(
+          'return document.getElementById("video2").readyState === 4');
+    }, 3000);
+  })
+  .then(function() {
+    t.pass('Stream successfully re-attached directly to a video element');
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+// deactivated in Chrome due to https://github.com/webrtc/adapter/issues/180
+test('Call getUserMedia with impossible constraints',
+    {skip: process.env.BROWSER === 'chrome'},
+    function(t) {
+      var driver = seleniumHelpers.buildDriver();
+
+      // Define test.
+      var testDefinition = function() {
+        var callback = arguments[arguments.length - 1];
+
+        var impossibleConstraints = {
+          video: {
+            width: 1280,
+            height: {min: 200, ideal: 720, max: 1080},
+            frameRate: {exact: 0} // to fail
+          }
+        };
+        // TODO: Remove when firefox 42+ accepts impossible constraints
+        // on fake devices.
+        if (window.adapter.browserDetails.browser === 'firefox') {
+          impossibleConstraints.fake = false;
+        }
+        navigator.mediaDevices.getUserMedia(impossibleConstraints)
+        .then(function(stream) {
+          window.stream = stream;
+          callback(null);
+        })
+        .catch(function(err) {
+          callback(err.name);
+        });
+      };
+
+      // Run test.
+      seleniumHelpers.loadTestPage(driver)
+      .then(function() {
+        t.plan(2);
+        t.pass('Page loaded');
+        return driver.executeScript(
+          'return adapter.browserDetails.browser === \'firefox\' ' +
+          '&& adapter.browserDetails.version < 42');
+      })
+      .then(function(isFirefoxAndVersionLessThan42) {
+        if (isFirefoxAndVersionLessThan42) {
+          t.skip('getUserMedia(impossibleConstraints) not supported on < 42');
+          throw 'skip-test';
+        }
+        return driver.executeAsyncScript(testDefinition);
+      })
+      .then(function(error) {
+        t.ok(error, 'getUserMedia(impossibleConstraints) must fail');
+      })
+      .then(function() {
+        t.end();
+      })
+      .then(null, function(err) {
+        if (err !== 'skip-test') {
+          t.fail(err);
+        }
+        t.end();
+      });
+    });
+
+test('dtmf', t => {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var pc1 = new RTCPeerConnection(null);
+    var pc2 = new RTCPeerConnection(null);
+
+    pc1.onicecandidate = e => pc2.addIceCandidate(e.candidate);
+    pc2.onicecandidate = e => pc1.addIceCandidate(e.candidate);
+    pc1.onnegotiationneeded = e => pc1.createOffer()
+      .then(offer => pc1.setLocalDescription(offer))
+      .then(() => pc2.setRemoteDescription(pc1.localDescription))
+      .then(() => pc2.createAnswer())
+      .then(answer => pc2.setLocalDescription(answer))
+      .then(() => pc1.setRemoteDescription(pc2.localDescription));
+
+    navigator.mediaDevices.getUserMedia({audio: true})
+    .then(stream => {
+      pc1.addStream(stream);
+      return new Promise(resolve => pc1.oniceconnectionstatechange =
+        e => pc1.iceConnectionState === 'connected' && resolve())
+      .then(() => {
+        let sender = pc1.getSenders().find(s => s.track.kind === 'audio');
+        if (!sender.dtmf) {
+          throw 'skip-test';
+        }
+        sender.dtmf.insertDTMF('1');
+        return new Promise(resolve => sender.dtmf.ontonechange = resolve);
+      })
+      .then(e => {
+        // Test getSenders Chrome polyfill
+        try {
+          // FF51+ doesn't have removeStream
+          if (!('removeStream' in pc1)) {
+            throw new DOMException('', 'NotSupportedError');
+          }
+          // Avoid <FF51 throwing NotSupportedError - https://bugzil.la/1213441
+          pc1.removeStream(stream);
+        } catch (err) {
+          if (err.name !== 'NotSupportedError') {
+            throw err;
+          }
+          pc1.getSenders().forEach(sender => pc1.removeTrack(sender));
+        }
+        stream.getTracks().forEach(track => {
+          let sender = pc1.getSenders().find(s => s.track === track);
+          if (sender) {
+            throw new Error('sender was not removed when it should have been');
+          }
+        });
+        return e.tone;
+      });
+    })
+    .then(tone => callback({tone: tone}),
+          err => callback({error: err.toString()}));
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver).then(() => {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(({tone, error}) => {
+    if (error) {
+      if (error === 'skip-test') {
+        t.skip('No sender.dtmf support in this browser.');
+      } else {
+        t.fail('PeerConnection failure: ' + error);
+      }
+      return;
+    }
+    t.is(tone, '1', 'DTMF sent');
+  })
+  .then(null, err => t.fail(err))
+  .then(() => t.end());
+});
+
+test('dtmf with addTrack', t => {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var pc1 = new RTCPeerConnection(null);
+    var pc2 = new RTCPeerConnection(null);
+
+    pc1.onicecandidate = e => pc2.addIceCandidate(e.candidate);
+    pc2.onicecandidate = e => pc1.addIceCandidate(e.candidate);
+    pc1.onnegotiationneeded = e => pc1.createOffer()
+      .then(offer => pc1.setLocalDescription(offer))
+      .then(() => pc2.setRemoteDescription(pc1.localDescription))
+      .then(() => pc2.createAnswer())
+      .then(answer => pc2.setLocalDescription(answer))
+      .then(() => pc1.setRemoteDescription(pc2.localDescription));
+
+    navigator.mediaDevices.getUserMedia({audio: true})
+    .then(stream => {
+      pc1.addTrack(stream.getAudioTracks()[0], stream);
+      return new Promise(resolve => pc1.oniceconnectionstatechange =
+        e => pc1.iceConnectionState === 'connected' && resolve())
+      .then(() => {
+        let sender = pc1.getSenders().find(s => s.track.kind === 'audio');
+        if (!sender.dtmf) {
+          throw 'skip-test';
+        }
+        sender.dtmf.insertDTMF('1');
+        return new Promise(resolve => sender.dtmf.ontonechange = resolve);
+      })
+      .then(e => {
+        // Test getSenders Chrome polyfill
+        try {
+          // FF51+ doesn't have removeStream
+          if (!('removeStream' in pc1)) {
+            throw new DOMException('', 'NotSupportedError');
+          }
+          // Avoid <FF51 throwing NotSupportedError - https://bugzil.la/1213441
+          pc1.removeStream(stream);
+        } catch (err) {
+          if (err.name !== 'NotSupportedError') {
+            throw err;
+          }
+          pc1.getSenders().forEach(sender => pc1.removeTrack(sender));
+        }
+        stream.getTracks().forEach(track => {
+          let sender = pc1.getSenders().find(s => s.track === track);
+          if (sender) {
+            throw new Error('sender was not removed when it should have been');
+          }
+        });
+        return e.tone;
+      });
+    })
+    .then(tone => callback({tone: tone}),
+          err => callback({error: err.toString()}));
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver).then(() => {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(({tone, error}) => {
+    if (error) {
+      if (error === 'skip-test') {
+        t.skip('No sender.dtmf support in this browser.');
+      } else {
+        t.fail('PeerConnection failure: ' + error);
+      }
+      return;
+    }
+    t.is(tone, '1', 'DTMF sent');
+  })
+  .then(null, err => t.fail(err))
+  .then(() => t.end());
+});
+
+test('addIceCandidate with null', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var pc1 = new RTCPeerConnection(null);
+    pc1.addIceCandidate(null)
+    // callback is called with either the empty result
+    // of the .then or the error from .catch.
+    .then(callback)
+    .catch(callback);
+  };
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(err) {
+    t.ok(err === null, 'addIceCandidate(null) resolves');
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('addIceCandidate with undefined', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var pc1 = new RTCPeerConnection(null);
+    pc1.addIceCandidate(undefined)
+    // callback is called with either the empty result
+    // of the .then or the error from .catch.
+    .then(callback)
+    .catch(callback);
+  };
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(err) {
+    t.ok(err === null, 'addIceCandidate(undefined) resolves');
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('call enumerateDevices', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    navigator.mediaDevices.enumerateDevices()
+    .then(function(devices) {
+      callback(devices);
+    })
+    .catch(function(err) {
+      callback(err);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(callback) {
+    // Callback will either return an error object or device array.
+    if (callback.name === 'Error') {
+      t.fail('Enumerate devices failure: ' + callback.toString());
+    } else {
+      return callback;
+    }
+  })
+  .then(function(devices) {
+    t.ok(typeof devices.length === 'number', 'Produced a devices array');
+    devices.forEach(function(device) {
+      t.ok(device.kind === 'videoinput' ||
+           device.kind === 'audioinput' ||
+           device.kind === 'audiooutput', 'Known device kind');
+      t.ok(device.deviceId.length !== undefined, 'Device id present');
+      t.ok(device.label.length !== undefined, 'Device label present');
+    });
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+// Test polyfill for getStats.
+test('getStats', {skip: true}, function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    window.testsEqualArray = [];
+    var pc1 = new RTCPeerConnection(null);
+
+    // Test expected new behavior.
+    new Promise(function(resolve, reject) {
+      pc1.getStats(null, resolve, reject);
+    })
+    .then(function(report) {
+      window.testsEqualArray.push([typeof report, 'object',
+        'report is an object.']);
+      report.forEach((stat, key) => {
+        window.testsEqualArray.push([stat.id, key,
+          'report key matches stats id.']);
+      });
+      return report;
+    })
+    .then(function(report) {
+      // Test legacy behavior
+      for (var key in report) {
+        // This avoids problems with Firefox
+        if (typeof report[key] === 'function') {
+          continue;
+        }
+        window.testsEqualArray.push([report[key].id, key,
+          'legacy report key matches stats id.']);
+      }
+      callback(null);
+    })
+    .catch(function(err) {
+      callback('getStats() should never fail: ' + err);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var getStatsResult = (error) ? 'error: ' + error.toString() : 'no errors';
+    t.ok(!error, 'GetStats result:  ' + getStatsResult);
+    return driver.wait(function() {
+      return driver.executeScript('return window.testsEqualArray');
+    });
+  })
+  .then(function(testsEqualArray) {
+    testsEqualArray.forEach(function(resultEq) {
+      // resultEq is an array of test data:
+      // resultEq[0] = actual value, resultEq[1] = expected value,
+      // resultEq[2] = success message.
+      t.equal(resultEq[0], resultEq[1], resultEq[2]);
+    });
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+// Test that the polyfill for Chrome's getStats falls back to the built-in
+// functionality when the legacy getStats signature is used, i.e. when the
+// callback is passed as the first argument.
+// FIXME: Implement callbacks for the results as well.
+test('originalChromeGetStats', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    window.testsEqualArray = [];
+    window.testsNotEqualArray = [];
+    var pc1 = new RTCPeerConnection(null);
+
+    new Promise(function(resolve, reject) {  // jshint ignore: line
+      pc1.getStats(resolve, null);
+    })
+    .then(function(response) {
+      var reports = response.result();
+      // TODO: Figure out a way to get inheritance to work properly in
+      // webdriver. report.names() is just an empty object when returned to
+      // webdriver.
+      reports.forEach(function(report) {
+        window.testsEqualArray.push([typeof report, 'object',
+          'report is an object']);
+        window.testsEqualArray.push([typeof report.id, 'string',
+          'report.id is a string']);
+        window.testsEqualArray.push([typeof report.type, 'string',
+          'report.type is a string']);
+        window.testsEqualArray.push([typeof report.timestamp, 'object',
+          'report.timestamp is an object']);
+        report.names().forEach(function(name) {
+          window.testsNotEqualArray.push([report.stat(name), null,
+            'stat ' + name + ' not equal to null']);
+        });
+      });
+      callback(null);
+    })
+    .catch(function(error) {
+      callback('getStats() should never fail: ' + error);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeScript('return adapter.browserDetails.browser')
+    .then(function(browser) {
+      if (browser !== 'chrome') {
+        t.skip('Non-chrome browser detected.');
+        throw 'skip-test';
+      }
+    });
+  })
+  .then(function() {
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    var getStatsResult = (error) ? 'error: ' + error.toString() : 'no errors';
+    t.ok(!error, 'GetStats result:  ' + getStatsResult);
+    return driver.wait(function() {
+      return driver.executeScript('return window.testsEqualArray');
+    });
+  })
+  .then(function(testsEqualArray) {
+    return driver.executeScript('return window.testsNotEqualArray')
+    .then(function(testsNotEqualArray) {
+      testsEqualArray.forEach(function(resultEq) {
+        // resultEq is an array of test data:
+        // resultEq[0] = actual value, resultEq[1] = expected value,
+        // resultEq[2] = success message.
+        t.equal(resultEq[0], resultEq[1], resultEq[2]);
+      });
+      testsNotEqualArray.forEach(function(resultNoEq) {
+        // resultNoEq is an array of test data:
+        // resultNoEq[0] = actual value, resultNoEq[1] = value it must not
+        // equal, resultNoEq[2] = success message.
+        t.notEqual(resultNoEq[0], resultNoEq[1], resultNoEq[2]);
+      });
+    });
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('getStats promise', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Define test.
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    var testsEqualArray = [];
+    var pc1 = new RTCPeerConnection(null);
+
+    pc1.getStats(null)
+    .then(function(report) {
+      testsEqualArray.push([typeof report, 'object',
+        'getStats with no selector returns a Promise']);
+      // Firefox does not like getStats without any arguments, therefore we call
+      // the callback before the next getStats call.
+      // FIXME: Remove this if ever supported by Firefox, also remove the t.skip
+      // section towards the end of the // Run test section.
+      if (window.adapter.browserDetails.browser === 'firefox') {
+        callback(testsEqualArray);
+        return;
+      }
+      pc1.getStats()
+      .then(function(reportWithoutArg) {
+        testsEqualArray.push([typeof reportWithoutArg, 'object',
+          'getStats with no arguments returns a Promise']);
+        callback(testsEqualArray);
+      })
+      .catch(function(err) {
+        callback(err);
+      });
+    })
+    .catch(function(err) {
+      callback(err);
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(callback) {
+    // If the callback value contains a stackTrace property it is an error,
+    // otherwise it is an array of test results.
+    if (callback.stackTrace) {
+      throw callback.message;
+    }
+    return callback;
+  })
+  .then(function(testsEqualArray) {
+    testsEqualArray.forEach(function(resultEq) {
+      // resultEq is an array of test data:
+      // resultEq[0] = actual value, resultEq[1] = expected value,
+      // resultEq[2] = success message.
+      t.equal(resultEq[0], resultEq[1], resultEq[2]);
+    });
+    // FIXME: Remove if supported by firefox. Also remove browser check in
+    // the testDefinition function.
+    return driver.executeScript(
+      'return adapter.browserDetails.browser === \'firefox\'')
+      .then(function(isFirefox) {
+        if (isFirefox) {
+          t.skip('Firefox does not support getStats without arguments.');
+        }
+      });
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+// iceTransportPolicy is renamed to iceTransports in Chrome by adapter.
+// This tests that no candidates are generated when the policy is 'relay'
+// and no TURN server has been configured.
+test('iceTransportPolicy relay functionality',
+    {skip: process.env.BROWSER !== 'chrome'},
+    function(t) {
+      var driver = seleniumHelpers.buildDriver();
+
+      // Define test.
+      var testDefinition = function() {
+        var callback = arguments[arguments.length - 1];
+
+        window.candidates = [];
+
+        var pc1 = new RTCPeerConnection({iceTransportPolicy: 'relay',
+          iceServers: []});
+
+        // Since we try to gather only relay candidates without specifying
+        // a TURN server, we should not get any candidates.
+        pc1.onicecandidate = function(event) {
+          if (event.candidate) {
+            window.candidates.push([event.candidate]);
+            callback(new Error('Candidate found'), event.candidate);
+          } else {
+            callback(null);
+          }
+        };
+
+        var constraints = {video: true, fake: true};
+        navigator.mediaDevices.getUserMedia(constraints)
+        .then(function(stream) {
+          pc1.addStream(stream);
+          pc1.createOffer().then(function(offer) {
+            return pc1.setLocalDescription(offer);
+          })
+          .catch(function(error) {
+            callback(error);
+          });
+        })
+        .catch(function(error) {
+          callback(error);
+        });
+      };
+
+      // Run test.
+      seleniumHelpers.loadTestPage(driver)
+      .then(function() {
+        t.pass('Page loaded');
+        return driver.executeAsyncScript(testDefinition);
+      })
+      .then(function(error) {
+        var errorMessage = (error) ? 'error: ' + error.toString() : 'no errors';
+        t.ok(!error, 'Result:  ' + errorMessage);
+      // The error callback should already fire if a candidate is found, but
+      // it might be missed due to the asynchronous nature of gathering, so
+      // this extra check is kept.
+        return driver.executeScript('return window.candidates');
+      })
+      .then(function(candidates) {
+        if (candidates.length === 0) {
+          t.pass('No candidates generated');
+        } else {
+          candidates.forEach(function(candidate) {
+            t.fail('Candidate found: ' + candidate);
+          });
+        }
+      })
+      .then(function() {
+        t.end();
+      })
+      .then(null, function(err) {
+        if (err !== 'skip-test') {
+          t.fail(err);
+        }
+        t.end();
+      });
+    });
+
+test('icegatheringstatechange event',
+    {skip: process.env.BROWSER !== 'MicrosoftEdge'},
+    function(t) {
+      var driver = seleniumHelpers.buildDriver();
+
+      // Define test.
+      var testDefinition = function() {
+        var callback = arguments[arguments.length - 1];
+
+        var pc1 = new RTCPeerConnection();
+        pc1.onicegatheringstatechange = function(event) {
+          if (pc1.iceGatheringState === 'complete') {
+            callback();
+          }
+        };
+
+        var constraints = {video: true, fake: true};
+        navigator.mediaDevices.getUserMedia(constraints)
+        .then(function(stream) {
+          pc1.addStream(stream);
+          pc1.createOffer().then(function(offer) {
+            return pc1.setLocalDescription(offer);
+          });
+        });
+      };
+
+      // Run test.
+      seleniumHelpers.loadTestPage(driver)
+      .then(function() {
+        return driver.executeAsyncScript(testDefinition);
+      })
+      .then(function() {
+        t.pass('icegatheringstatechange fired and state is \'complete\'');
+        t.end();
+      })
+      .then(null, function(err) {
+        if (err !== 'skip-test') {
+          t.fail(err);
+        }
+        t.end();
+      });
+    });
+
+test('static generateCertificate method', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.plan(2);
+    t.pass('Page loaded');
+  })
+  .then(function() {
+    return driver.executeScript(function() {
+      return (window.adapter.browserDetails.browser === 'chrome' &&
+          window.adapter.browserDetails.version >= 49) ||
+          (window.adapter.browserDetails.browser === 'firefox' &&
+          window.adapter.browserDetails.version > 38);
+    });
+  })
+  .then(function(isSupported) {
+    if (!isSupported) {
+      t.skip('generateCertificate not supported by this browser version');
+      throw 'skip-test';
+    }
+    return driver.executeScript(
+      'return typeof RTCPeerConnection.generateCertificate === \'function\'');
+  })
+  .then(function(hasGenerateCertificateMethod) {
+    t.ok(hasGenerateCertificateMethod,
+        'RTCPeerConnection has generateCertificate method');
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+test('addTrack getLocalStreams mapping', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    var callback = arguments[arguments.length - 1];
+
+    window.testPassed = [];
+    window.testFailed = [];
+    var tc = {
+      ok: function(ok, msg) {
+        window[ok ? 'testPassed' : 'testFailed'].push(msg);
+      },
+      is: function(a, b, msg) {
+        this.ok((a === b), msg + ' - got ' + b);
+      },
+      pass: function(msg) {
+        this.ok(true, msg);
+      },
+      fail: function(msg) {
+        this.ok(false, msg);
+      }
+    };
+    var pc = new RTCPeerConnection();
+    navigator.mediaDevices.getUserMedia({audio: true})
+    .then(function(stream) {
+      tc.pass('got audio stream');
+      var sender = pc.addTrack(stream.getAudioTracks()[0], stream);
+      tc.ok(sender && sender.track, 'addTrack returned a sender with a track');
+      tc.ok(pc.getSenders().length === 1, 'getSenders returns one sender');
+      return navigator.mediaDevices.getUserMedia({video: true});
+    })
+    .then(function(stream) {
+      tc.pass('got video stream');
+      var track = stream.getVideoTracks()[0];
+      var localStream = pc.getLocalStreams()[0];
+      localStream.addTrack(track);
+      pc.addTrack(track, localStream);
+      tc.ok(pc.getLocalStreams().length === 1, 'still has one local stream');
+      tc.ok(pc.getLocalStreams()[0].getTracks().length ===
+          localStream.getTracks().length,
+          'stream has the right number of tracks');
+      tc.ok(pc.getSenders().length === 2, 'getSenders returns two senders');
+      return pc.createOffer();
+    })
+    .then(function() {
+      tc.pass('created offer');
+    })
+    .then(function() {
+      if (pc.removeStream) {
+        pc.removeStream(pc.getLocalStreams()[0]);
+        tc.ok(pc.getLocalStreams().length === 0,
+            'local streams is empty after removal');
+        tc.ok(pc.getSenders().length === 0,
+            'getSenders is empty after removal');
+      }
+    })
+    .then(function() {
+      callback();
+    })
+    .catch(function(err) {
+      callback(err.toString());
+    });
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeAsyncScript(testDefinition);
+  })
+  .then(function(error) {
+    // The callback value is an error string, or undefined on success.
+    if (error) {
+      throw (error);
+    }
+    return driver.executeScript('return window.testPassed');
+  })
+  .then(function(testPassed) {
+    return driver.executeScript('return window.testFailed')
+    .then(function(testFailed) {
+      for (var testPass = 0; testPass < testPassed.length; testPass++) {
+        t.pass(testPassed[testPass]);
+      }
+      for (var testFail = 0; testFail < testFailed.length; testFail++) {
+        t.fail(testFailed[testFail]);
+      }
+    });
+  })
+  .then(function() {
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
+
+// This MUST be the last test since it loads adapter
+// again which may result in unintended behaviour.
+test('Non-module logging to console still works', function(t) {
+  var driver = seleniumHelpers.buildDriver();
+
+  var testDefinition = function() {
+    window.testsEqualArray = [];
+    window.logCount = 0;
+    var saveConsole = console.log.bind(console);
+    console.log = function() {
+      window.logCount++;
+    };
+
+    console.log('log me');
+    console.log = saveConsole;
+
+    // Check for existence of variables and functions from public API.
+    window.testsEqualArray.push([typeof RTCPeerConnection, 'function',
+      'RTCPeerConnection is a function']);
+    window.testsEqualArray.push([typeof navigator.getUserMedia, 'function',
+      'getUserMedia is a function']);
+    window.testsEqualArray.push([typeof window.adapter.browserDetails.browser,
+      'string', 'browserDetails.browser is a string']);
+    window.testsEqualArray.push([typeof window.adapter.browserDetails.version,
+      'number', 'browserDetails.version is a number']);
+  };
+
+  // Run test.
+  seleniumHelpers.loadTestPage(driver)
+  .then(function() {
+    t.pass('Page loaded');
+    return driver.executeScript(testDefinition);
+  })
+  .then(function() {
+    return driver.executeScript('return window.testsEqualArray');
+  })
+  .then(function(testsEqualArray) {
+    testsEqualArray.forEach(function(resultEq) {
+      // resultEq is an array of test data:
+      // resultEq[0] = actual value, resultEq[1] = expected value,
+      // resultEq[2] = success message.
+      t.equal(resultEq[0], resultEq[1], resultEq[2]);
+    });
+  })
+  .then(function() {
+    return driver.executeScript('return window.logCount');
+  })
+  .then(function(logCount) {
+    t.ok(logCount > 0, 'A log message appeared on the console.');
+    t.end();
+  })
+  .then(null, function(err) {
+    if (err !== 'skip-test') {
+      t.fail(err);
+    }
+    t.end();
+  });
+});
diff --git a/test/testpage.html b/test/testpage.html
new file mode 100644
index 0000000..f783865
--- /dev/null
+++ b/test/testpage.html
@@ -0,0 +1,19 @@
+<html>
+<head>
+<meta charset="utf-8">
+<title>Test page for adapter.js</title>
+</head>
+<body>
+<h1>Test page for adapter.js</h1>
+<script src="../out/adapter.js"></script>
+The browser is: <span id="browser"></span>
+<br>
+The browser version is: <span id="browserversion"></span>
+<script>
+  var browser_display = document.getElementById('browser');
+  var version_display = document.getElementById('browserversion');
+  browser_display.innerHTML = adapter.browserDetails.browser;
+  version_display.innerHTML = adapter.browserDetails.version;
+</script>
+</body>
+</html>
diff --git a/test/unit/.eslintrc b/test/unit/.eslintrc
new file mode 100644
index 0000000..6b07904
--- /dev/null
+++ b/test/unit/.eslintrc
@@ -0,0 +1,7 @@
+{
+    "env": {
+        "mocha": true,
+        "browser": true
+    },
+    "rules": {}
+}
diff --git a/test/unit/adapter_factory.js b/test/unit/adapter_factory.js
new file mode 100644
index 0000000..d48cfff
--- /dev/null
+++ b/test/unit/adapter_factory.js
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+const sinon = require('sinon');
+const sinonChai = require('sinon-chai');
+chai.use(sinonChai);
+
+describe('adapter factory', () => {
+  const adapterFactory = require('../../src/js/adapter_factory.js');
+  const utils = require('../../src/js/utils.js');
+
+  let window;
+  beforeEach(() => {
+    window = {
+      RTCPeerConnection: sinon.stub(),
+    };
+  });
+  afterEach(() => {
+    utils.detectBrowser.restore();
+  });
+
+  ['Chrome', 'Firefox', 'Safari', 'Edge'].forEach(browser => {
+    it('does not shim ' + browser + ' when disabled', () => {
+      sinon.stub(utils, 'detectBrowser').returns({
+        browser: browser.toLowerCase()
+      });
+      let options = {};
+      options['shim' + browser] = false;
+      const adapter = adapterFactory(window, options);
+      expect(adapter).not.to.have.property('browserShim');
+    });
+  });
+});
diff --git a/test/unit/chrome.js b/test/unit/chrome.js
new file mode 100644
index 0000000..8b11b15
--- /dev/null
+++ b/test/unit/chrome.js
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+
+describe('Chrome shim', () => {
+  const shim = require('../../src/js/chrome/chrome_shim');
+  let window;
+
+  beforeEach(() => {
+    window = {
+      webkitRTCPeerConnection: function() {}
+    };
+  });
+
+  describe('shimPeerConnection', () => {
+    it('creates window.RTCPeerConnection', () => {
+      shim.shimPeerConnection(window);
+      expect(window.RTCPeerConnection).not.to.equal(undefined);
+    });
+  });
+});
diff --git a/test/unit/detectBrowser.js b/test/unit/detectBrowser.js
new file mode 100644
index 0000000..cbd612a
--- /dev/null
+++ b/test/unit/detectBrowser.js
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+
+describe('detectBrowser', () => {
+  const detectBrowser = require('../../src/js/utils.js').detectBrowser;
+  let window;
+  let navigator;
+
+  beforeEach(() => {
+    navigator = {};
+    window = {navigator};
+  });
+
+  it('detects Firefox if navigator.mozGetUserMedia exists', () => {
+    navigator.userAgent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; ' +
+        'rv:44.0) Gecko/20100101 Firefox/44.0';
+    navigator.mozGetUserMedia = function() {};
+
+    const browserDetails = detectBrowser(window);
+    expect(browserDetails.browser).to.equal('firefox');
+    expect(browserDetails.version).to.equal(44);
+  });
+
+  it('detects Chrome if navigator.webkitGetUserMedia exists', () => {
+    navigator.userAgent = 'Mozilla/5.0 (X11; Linux x86_64) ' +
+        'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 ' +
+        'Safari/537.36';
+    navigator.webkitGetUserMedia = function() {};
+    window.webkitRTCPeerConnection = function() {};
+
+    const browserDetails = detectBrowser(window);
+    expect(browserDetails.browser).to.equal('chrome');
+    expect(browserDetails.version).to.equal(45);
+  });
+
+  it('detects Edge if navigator.mediaDevices exists', () => {
+    navigator.userAgent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' +
+        'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2486.0 ' +
+        'Safari/537.36 Edge/13.10547';
+    navigator.mediaDevices = function() {};
+
+    const browserDetails = detectBrowser(window);
+    expect(browserDetails.browser).to.equal('edge');
+    expect(browserDetails.version).to.equal(10547);
+  });
+
+  it('detects Safari if navigator.webkitGetUserMedia exists', () => {
+    navigator.userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+          'AppleWebKit/604.1.6 (KHTML, like Gecko) Version/10.2 Safari/604.1.6';
+    navigator.webkitGetUserMedia = function() {};
+
+    const browserDetails = detectBrowser(window);
+    expect(browserDetails.browser).to.equal('safari');
+    expect(browserDetails.version).to.equal(604);
+  });
+
+  it('detects Safari if navigator.mediaDevices exists', () => {
+    navigator.userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+          'AppleWebKit/604.1.6 (KHTML, like Gecko) Version/10.2 Safari/604.1.6';
+    navigator.mediaDevices = function() {};
+
+    const browserDetails = detectBrowser(window);
+    expect(browserDetails.browser).to.equal('safari');
+    expect(browserDetails.version).to.equal(604);
+  });
+});
diff --git a/test/unit/edge.js b/test/unit/edge.js
new file mode 100644
index 0000000..54136c5
--- /dev/null
+++ b/test/unit/edge.js
@@ -0,0 +1,1400 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+const sinon = require('sinon');
+const sinonChai = require('sinon-chai');
+chai.use(sinonChai);
+
+const SDPUtils = require('sdp');
+const EventEmitter = require('events');
+
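+// Stubs out the ORTC API surface (RTCIceGatherer, RTCIceTransport,
+// RTCDtlsTransport, RTCRtpSender/Receiver, MediaStream/MediaStreamTrack)
+// that the Edge shim builds its RTCPeerConnection on, so the shim can be
+// unit-tested outside of Edge.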
+function mockORTC(window) {
+  // The shim uses document.createDocumentFragment() as an event target,
+  // so mock it with an EventEmitter.
+  global.document = {
+    createDocumentFragment: () => {
+      let e = new EventEmitter();
+      e.addEventListener = e.addListener.bind(e);
+      e.removeEventListener = e.removeListener.bind(e);
+      e.dispatchEvent = function(ev) {
+        e.emit(ev.type, ev);
+      };
+      return e;
+    }
+  };
+  global.Event = function(type) {
+    this.type = type;
+  };
+
+  window.setTimeout = global.setTimeout;
+
+  window.RTCSessionDescription = function(init) {
+    return init;
+  };
+
+  window.RTCIceGatherer = function() {
+    this.getLocalParameters = function() {
+      return {
+        usernameFragment: 'someufrag',
+        password: 'somepass'
+      };
+    };
+  };
+  window.RTCIceTransport = function() {
+    this.start = sinon.spy();
+    this.setRemoteCandidates = sinon.spy();
+  };
+  window.RTCDtlsTransport = function() {
+    this.start = function() {};
+    this.getLocalParameters = function() {
+      return {
+        role: 'auto',
+        fingerprints: [
+          {
+            algorithm: 'alg',
+            value: 'fi:ng:ger:pr:in:t1'
+          }
+        ]
+      };
+    };
+  };
+
+  window.RTCRtpReceiver = function(transport, kind) {
+    this.track = new window.MediaStreamTrack();
+    this.track.kind = kind;
+    this.transport = transport;
+
+    this.receive = function() {};
+    this.setTransport = function() {};
+  };
+  function getCapabilities(kind) {
+    var opus = {
+      name: 'opus',
+      kind: 'audio',
+      clockRate: 48000,
+      preferredPayloadType: 111,
+      numChannels: 2
+    };
+    var vp8 = {
+      name: 'vp8',
+      kind: 'video',
+      clockRate: 90000,
+      preferredPayloadType: 100,
+      numChannels: 1
+    };
+    var rtx = {
+      name: 'rtx',
+      kind: 'video',
+      clockRate: 90000,
+      preferredPayloadType: 101,
+      numChannels: 1,
+      parameters: {apt: 100}
+    };
+    var codecs;
+    switch (kind) {
+      case 'audio':
+        codecs = [opus];
+        break;
+      case 'video':
+        codecs = [vp8, rtx];
+        break;
+      default:
+        codecs = [opus, vp8, rtx];
+        break;
+    }
+    return {
+      codecs: codecs,
+      headerExtensions: []
+    };
+  }
+  window.RTCRtpReceiver.getCapabilities = getCapabilities;
+
+  window.RTCRtpSender = function(track, transport) {
+    this.track = track;
+    this.transport = transport;
+    this.send = function() {};
+    this.setTransport = function() {};
+  };
+  window.RTCRtpSender.getCapabilities = getCapabilities;
+
+  window.MediaStream = function(tracks) {
+    this.id = SDPUtils.generateIdentifier();
+    this._tracks = tracks || [];
+    this.getTracks = () => this._tracks;
+    this.getAudioTracks = () => this._tracks.filter(t => t.kind === 'audio');
+    this.getVideoTracks = () => this._tracks.filter(t => t.kind === 'video');
+    this.addTrack = (t) => this._tracks.push(t);
+  };
+  window.MediaStreamTrack = function() {
+    this.id = SDPUtils.generateIdentifier();
+  };
+}
+
+describe('Edge shim', () => {
+  const shim = require('../../src/js/edge/edge_shim');
+  let window;
+
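+  // Edge user agent strings for builds 14392 and 15025, used to exercise
+  // version-dependent behaviour such as the STUN server filtering below.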
+  const ua14392 = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' +
+      'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 ' +
+      'Safari/537.36 Edge/14.14392';
+
+  const ua15025 = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' +
+      'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 ' +
+      'Safari/537.36 Edge/15.15025';
+
+  beforeEach(() => {
+    window = {
+      navigator: {
+        userAgent: ua15025,
+        mediaDevices: function() {}
+      }
+    };
+    mockORTC(window);
+    shim.shimPeerConnection(window);
+  });
+
+  it('creates window.RTCPeerConnection', () => {
+    delete window.RTCPeerConnection;
+    shim.shimPeerConnection(window);
+    expect(window.RTCPeerConnection).not.to.equal(undefined);
+  });
+
+  it('overrides window.RTCPeerConnection if it exists', () => {
+    window.RTCPeerConnection = true;
+    shim.shimPeerConnection(window);
+    expect(window.RTCPeerConnection).not.to.equal(true);
+  });
+
+  describe('filtering of STUN and TURN servers', () => {
+    let pc;
+    it('filters STUN before r14393', () => {
+      window.navigator.userAgent = ua14392;
+      // need to re-evaluate after changing the browser version.
+      shim.shimPeerConnection(window);
+      pc = new window.RTCPeerConnection({
+        iceServers: [{urls: 'stun:stun.l.google.com'}]
+      });
+      expect(pc.iceOptions.iceServers).to.deep.equal([]);
+    });
+
+    it('does not filter STUN after r14393', () => {
+      pc = new window.RTCPeerConnection({
+        iceServers: [{urls: 'stun:stun.l.google.com'}]
+      });
+      expect(pc.iceOptions.iceServers).to.deep.equal([
+        {urls: 'stun:stun.l.google.com'}
+      ]);
+    });
+
+    it('filters incomplete TURN urls', () => {
+      pc = new window.RTCPeerConnection({
+        iceServers: [
+          {urls: 'turn:stun.l.google.com'},
+          {urls: 'turn:stun.l.google.com:19302'}
+        ]
+      });
+      expect(pc.iceOptions.iceServers).to.deep.equal([]);
+    });
+
+    it('filters TURN TCP', () => {
+      pc = new window.RTCPeerConnection({
+        iceServers: [
+          {urls: 'turn:stun.l.google.com:19302?transport=tcp'}
+        ]
+      });
+      expect(pc.iceOptions.iceServers).to.deep.equal([]);
+    });
+
+    describe('removes all but the first server of a type', () => {
+      it('in separate entries', () => {
+        pc = new window.RTCPeerConnection({
+          iceServers: [
+            {urls: 'stun:stun.l.google.com'},
+            {urls: 'turn:stun.l.google.com:19301?transport=udp'},
+            {urls: 'turn:stun.l.google.com:19302?transport=udp'}
+          ]
+        });
+        expect(pc.iceOptions.iceServers).to.deep.equal([
+          {urls: 'stun:stun.l.google.com'},
+          {urls: 'turn:stun.l.google.com:19301?transport=udp'}
+        ]);
+      });
+
+      it('in urls entries', () => {
+        pc = new window.RTCPeerConnection({
+          iceServers: [
+            {urls: 'stun:stun.l.google.com'},
+            {urls: [
+              'turn:stun.l.google.com:19301?transport=udp',
+              'turn:stun.l.google.com:19302?transport=udp'
+            ]}
+          ]
+        });
+        expect(pc.iceOptions.iceServers).to.deep.equal([
+          {urls: 'stun:stun.l.google.com'},
+          {urls: ['turn:stun.l.google.com:19301?transport=udp']}
+        ]);
+      });
+    });
+  });
+
+  describe('setLocalDescription', () => {
+    let pc;
+    beforeEach(() => {
+      pc = new window.RTCPeerConnection();
+    });
+
+    it('returns a promise', (done) => {
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then((offer) => {
+        return pc.setLocalDescription(offer);
+      })
+      .then(done);
+    });
+
+    it('calls the legacy success callback', (done) => {
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then((offer) => {
+        return pc.setLocalDescription(offer, done);
+      });
+    });
+
+    it('changes the signalingState to have-local-offer', (done) => {
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then((offer) => {
+        return pc.setLocalDescription(offer);
+      })
+      .then(() => {
+        expect(pc.localDescription.type).to.equal('offer');
+        expect(pc.signalingState).to.equal('have-local-offer');
+        done();
+      });
+    });
+
+    describe('InvalidStateError is thrown when called with', () => {
+      it('an answer in signalingState stable', (done) => {
+        pc.setRemoteDescription({type: 'answer'})
+        .catch((e) => {
+          expect(e.name).to.equal('InvalidStateError');
+          done();
+        });
+      });
+
+      it('an offer in signalingState have-local-offer', (done) => {
+        pc.createOffer({offerToReceiveAudio: 1})
+        .then((offer) => {
+          return pc.setLocalDescription(offer);
+        })
+        .then(() => {
+          return pc.setRemoteDescription({type: 'offer'});
+        })
+        .catch((e) => {
+          expect(e.name).to.equal('InvalidStateError');
+          done();
+        });
+      });
+    });
+  });
+
+  describe('setRemoteDescription', () => {
+    let pc;
+    beforeEach(() => {
+      pc = new window.RTCPeerConnection();
+    });
+
+    it('returns a promise', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp})
+      .then(done);
+    });
+    it('calls the legacy success callback', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp}, done);
+    });
+
+    it('changes the signalingState to have-remote-offer', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp})
+      .then(() => {
+        expect(pc.signalingState).to.equal('have-remote-offer');
+        done();
+      });
+    });
+
+    it('sets the remoteDescription', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp}, () => {
+        expect(pc.remoteDescription.type).to.equal('offer');
+        expect(pc.remoteDescription.sdp).to.equal(sdp);
+        done();
+      });
+    });
+
+    describe('when called with an offer containing a track', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendonly\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:111 opus/48000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n';
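+      // The a=ssrc msid line maps SSRC 1001 to stream1/track1, so one
+      // stream with a single audio track is expected.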
+      it('triggers onaddstream', (done) => {
+        pc.onaddstream = function(event) {
+          const stream = event.stream;
+          expect(stream.getTracks().length).to.equal(1);
+          expect(stream.getTracks()[0].kind).to.equal('audio');
+
+          done();
+        };
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+
+      it('emits a addstream event', (done) => {
+        pc.addEventListener('addstream', function(event) {
+          const stream = event.stream;
+          expect(stream.getTracks().length).to.equal(1);
+          expect(stream.getTracks()[0].kind).to.equal('audio');
+
+          done();
+        });
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+
+      it('triggers ontrack', (done) => {
+        pc.ontrack = function(event) {
+          expect(event.track.kind).to.equal('audio');
+          expect(event.receiver).not.to.equal(undefined);
+          expect(event.streams.length).to.equal(1);
+
+          done();
+        };
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+
+      it('emits a track event', (done) => {
+        pc.addEventListener('track', function(event) {
+          expect(event.track.kind).to.equal('audio');
+          expect(event.receiver).not.to.equal(undefined);
+          expect(event.streams.length).to.equal(1);
+
+          done();
+        });
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+    });
+
+    describe('when called with an offer without (explicit) tracks', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\n' +
+          // 'a=msid-semantic: WMS\r\n' + // no msid-semantic
+          'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendonly\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:111 opus/48000\r\n';
+
+      it('triggers onaddstream', (done) => {
+        pc.onaddstream = function(event) {
+          const stream = event.stream;
+          expect(stream.getTracks().length).to.equal(1);
+          expect(stream.getTracks()[0].kind).to.equal('audio');
+
+          done();
+        };
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+
+      it('triggers ontrack', (done) => {
+        pc.ontrack = function(event) {
+          expect(event.track.kind).to.equal('audio');
+          expect(event.receiver).not.to.equal(undefined);
+          expect(event.streams.length).to.equal(1);
+          done();
+        };
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+    });
+
+    describe('when called with an offer containing multiple streams ' +
+        '/ tracks', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendonly\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:111 opus/48000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio2\r\n' +
+          'a=sendonly\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:111 opus/48000\r\n' +
+          'a=ssrc:2002 msid:stream2 track2\r\n' +
+          'a=ssrc:2002 cname:some\r\n';
+
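+      // The two m-sections carry distinct msids (stream1/track1 and
+      // stream2/track2), so two streams and two tracks are expected.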
+      it('triggers onaddstream twice', (done) => {
+        let numStreams = 0;
+        pc.onaddstream = function(event) {
+          numStreams++;
+          expect(event.stream.id).to.equal('stream' + numStreams);
+          if (numStreams === 2) {
+            done();
+          }
+        };
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+
+      it('triggers ontrack twice', (done) => {
+        let numTracks = 0;
+        pc.ontrack = function(event) {
+          numTracks++;
+          expect(event.streams[0].id).to.equal('stream' + numTracks);
+          if (numTracks === 2) {
+            done();
+          }
+        };
+        pc.setRemoteDescription({type: 'offer', sdp: sdp});
+      });
+    });
+
+    // TODO: add a test for recvonly to show it doesn't trigger the callback.
+    //   probably easiest done using a sinon.stub
+    //
+    describe('sets the canTrickleIceCandidates property', () => {
+      it('to true when called with an offer that contains ' +
+          'a=ice-options:trickle', (done) => {
+        const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+            's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+            'a=ice-options:trickle\r\n';
+        pc.setRemoteDescription({type: 'offer', sdp: sdp})
+        .then(() => {
+          expect(pc.canTrickleIceCandidates).to.equal(true);
+          done();
+        });
+      });
+
+      it('to false when called with an offer that does not contain ' +
+          'a=ice-options:trickle', (done) => {
+        const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+            's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+        pc.setRemoteDescription({type: 'offer', sdp: sdp})
+        .then(() => {
+          expect(pc.canTrickleIceCandidates).to.equal(false);
+          done();
+        });
+      });
+    });
+
+    describe('InvalidStateError is thrown when called with', () => {
+      it('an answer in signalingState stable', (done) => {
+        pc.setRemoteDescription({type: 'answer'})
+        .catch((e) => {
+          expect(e.name).to.equal('InvalidStateError');
+          done();
+        });
+      });
+
+      it('an offer in signalingState have-local-offer', (done) => {
+        pc.createOffer({offerToReceiveAudio: 1})
+        .then((offer) => {
+          return pc.setLocalDescription(offer);
+        })
+        .then(() => {
+          return pc.setRemoteDescription({type: 'offer'});
+        })
+        .catch((e) => {
+          expect(e.name).to.equal('InvalidStateError');
+          done();
+        });
+      });
+    });
+  });
+
+  describe('createOffer', () => {
+    let pc;
+    beforeEach(() => {
+      pc = new window.RTCPeerConnection();
+    });
+
+    it('returns a promise', (done) => {
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then(() => {
+        done();
+      });
+    });
+    it('calls the legacy success callback', (done) => {
+      pc.createOffer(() => {
+        done();
+      }, null, {offerToReceiveAudio: 1});
+    });
+    it('does not change the signalingState', (done) => {
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then(() => {
+        expect(pc.signalingState).to.equal('stable');
+        done();
+      });
+    });
+
+    describe('when called with offerToReceiveAudio', () => {
+      it('= 1 the generated SDP should contain one audio m-line', (done) => {
+        pc.createOffer({offerToReceiveAudio: 1})
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(2);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('recvonly');
+          done();
+        });
+      });
+      it('= 2 the generated SDP should contain two audio m-lines', (done) => {
+        pc.createOffer({offerToReceiveAudio: 2})
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(3);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('recvonly');
+          expect(SDPUtils.getDirection(sections[2])).to.equal('recvonly');
+          done();
+        });
+      });
+      it('= true the generated SDP should contain one audio m-line', (done) => {
+        pc.createOffer({offerToReceiveAudio: true})
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(2);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('recvonly');
+          done();
+        });
+      });
+      it('= false the generated SDP should not offer to receive ' +
+          'audio', (done) => {
+        const audioTrack = new window.MediaStreamTrack();
+        audioTrack.kind = 'audio';
+        const stream = new window.MediaStream([audioTrack]);
+
+        pc.addStream(stream);
+        pc.createOffer({offerToReceiveAudio: false})
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(2);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('sendonly');
+          done();
+        });
+      });
+    });
+
+    describe('when called with offerToReceiveVideo', () => {
+      it('the generated SDP should contain a video m-line', (done) => {
+        pc.createOffer({offerToReceiveVideo: 1})
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(2);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('recvonly');
+          done();
+        });
+      });
+    });
+
+    describe('when called with offerToReceiveAudio and ' +
+        'offerToReceiveVideo', () => {
+      it('the generated SDP should contain two m-lines', (done) => {
+        pc.createOffer({offerToReceiveAudio: 1, offerToReceiveVideo: 1})
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(3);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('recvonly');
+          expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+          expect(SDPUtils.getDirection(sections[2])).to.equal('recvonly');
+          expect(SDPUtils.getKind(sections[2])).to.equal('video');
+          done();
+        });
+      });
+    });
+
+    describe('when called after adding a stream', () => {
+      describe('with an audio track', () => {
+        it('the generated SDP should contain an audio m-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addStream(stream);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendrecv');
+            done();
+          });
+        });
+      });
+
+      describe('with an audio track not offering to receive audio', () => {
+        it('the generated SDP should contain a sendonly audio ' +
+            'm-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addStream(stream);
+          pc.createOffer({offerToReceiveAudio: 0})
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendonly');
+            done();
+          });
+        });
+      });
+
+      describe('with an audio track and offering to receive video', () => {
+        it('the generated SDP should contain a recvonly m-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addStream(stream);
+          pc.createOffer({offerToReceiveVideo: 1})
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(3);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendrecv');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            expect(SDPUtils.getDirection(sections[2])).to.equal('recvonly');
+            done();
+          });
+        });
+      });
+
+      describe('with a video track', () => {
+        it('the generated SDP should contain a video m-line', (done) => {
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([videoTrack]);
+
+          pc.addStream(stream);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getKind(sections[1])).to.equal('video');
+            done();
+          });
+        });
+      });
+
+      describe('with a video track and offerToReceiveAudio', () => {
+        it('the generated SDP should contain an audio and a ' +
+            'video m-line', (done) => {
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([videoTrack]);
+
+          pc.addStream(stream);
+          pc.createOffer({offerToReceiveAudio: 1})
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(3);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            done();
+          });
+        });
+      });
+
+
+      describe('with an audio track and a video track', () => {
+        it('the generated SDP should contain an audio and video ' +
+            'm-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([audioTrack, videoTrack]);
+
+          pc.addStream(stream);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(3);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            done();
+          });
+        });
+      });
+
+      describe('with an audio track and two video tracks', () => {
+        it('the generated SDP should contain an audio and ' +
+            'video m-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const videoTrack2 = new window.MediaStreamTrack();
+          videoTrack2.kind = 'video';
+          const stream = new window.MediaStream([audioTrack, videoTrack]);
+          const stream2 = new window.MediaStream([videoTrack2]);
+
+          pc.addStream(stream);
+          pc.addStream(stream2);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(4);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            expect(SDPUtils.getKind(sections[3])).to.equal('video');
+            done();
+          });
+        });
+      });
+    });
+
+    describe('when called after addTrack', () => {
+      describe('with an audio track', () => {
+        it('the generated SDP should contain an audio m-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addTrack(audioTrack, stream);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendrecv');
+            done();
+          });
+        });
+      });
+
+      describe('with an audio track not offering to receive audio', () => {
+        it('the generated SDP should contain a sendonly audio ' +
+            'm-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addTrack(audioTrack, stream);
+          pc.createOffer({offerToReceiveAudio: 0})
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendonly');
+            done();
+          });
+        });
+      });
+
+      describe('with an audio track and offering to receive video', () => {
+        it('the generated SDP should contain a recvonly m-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addTrack(audioTrack, stream);
+          pc.createOffer({offerToReceiveVideo: 1})
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(3);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendrecv');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            expect(SDPUtils.getDirection(sections[2])).to.equal('recvonly');
+            done();
+          });
+        });
+      });
+
+      describe('with a video track', () => {
+        it('the generated SDP should contain a video m-line', (done) => {
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([videoTrack]);
+
+          pc.addTrack(videoTrack, stream);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getKind(sections[1])).to.equal('video');
+            done();
+          });
+        });
+      });
+
+      describe('with a video track and offerToReceiveAudio', () => {
+        it('the generated SDP should contain an audio and a ' +
+            'video m-line', (done) => {
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([videoTrack]);
+
+          pc.addTrack(videoTrack, stream);
+          pc.createOffer({offerToReceiveAudio: 1})
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(3);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            done();
+          });
+        });
+      });
+
+
+      describe('with an audio track and a video track', () => {
+        it('the generated SDP should contain an audio and video ' +
+            'm-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([audioTrack, videoTrack]);
+
+          pc.addTrack(audioTrack, stream);
+          pc.addTrack(videoTrack, stream);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(3);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            done();
+          });
+        });
+      });
+
+      describe('with an audio track and two video tracks', () => {
+        it('the generated SDP should contain an audio and ' +
+            'video m-line', (done) => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const videoTrack2 = new window.MediaStreamTrack();
+          videoTrack2.kind = 'video';
+          const stream = new window.MediaStream([audioTrack, videoTrack]);
+          const stream2 = new window.MediaStream([videoTrack2]);
+
+          pc.addTrack(audioTrack, stream);
+          pc.addTrack(videoTrack, stream);
+          pc.addTrack(videoTrack2, stream2);
+          pc.createOffer()
+          .then((offer) => {
+            const sections = SDPUtils.splitSections(offer.sdp);
+            expect(sections.length).to.equal(4);
+            expect(SDPUtils.getKind(sections[1])).to.equal('audio');
+            expect(SDPUtils.getKind(sections[2])).to.equal('video');
+            expect(SDPUtils.getKind(sections[3])).to.equal('video');
+            done();
+          });
+        });
+      });
+    });
+  });
+
+  describe('createAnswer', () => {
+    let pc;
+    beforeEach(() => {
+      pc = new window.RTCPeerConnection();
+    });
+
+    it('returns a promise', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp})
+      .then(() => {
+        return pc.createAnswer();
+      })
+      .then(() => {
+        done();
+      });
+    });
+    it('calls the legacy success callback', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp})
+      .then(() => {
+        return pc.createAnswer(() => {
+          done();
+        });
+      });
+    });
+
+    it('does not change the signaling state', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp})
+      .then(() => {
+        expect(pc.signalingState).to.equal('have-remote-offer');
+        return pc.createAnswer();
+      })
+      .then(() => {
+        expect(pc.signalingState).to.equal('have-remote-offer');
+        done();
+      });
+    });
+
+    it('uses payload types of offerer', (done) => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 98\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendrecv\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:98 opus/48000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n';
+      pc.setRemoteDescription({type: 'offer', sdp: sdp})
+      .then(() => {
+        return pc.createAnswer();
+      })
+      .then((answer) => {
+        expect(answer.sdp).to.contain('a=rtpmap:98 opus');
+        done();
+      });
+    });
+
+    // test https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-15#section-5.3.4
+    describe('direction attribute', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendrecv\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:111 opus/48000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n';
+
+      it('responds with an inactive answer to recvonly', (done) => {
+        pc.setRemoteDescription({type: 'offer', sdp: sdp.replace('sendrecv',
+            'recvonly')})
+        .then(() => {
+          return pc.createAnswer();
+        })
+        .then((answer) => {
+          const sections = SDPUtils.splitSections(answer.sdp);
+          expect(sections.length).to.equal(2);
+          expect(SDPUtils.getDirection(sections[1])).to.equal('inactive');
+          done();
+        });
+      });
+
+      describe('with a local track', () => {
+        beforeEach(() => {
+          const audioTrack = new window.MediaStreamTrack();
+          audioTrack.kind = 'audio';
+          const stream = new window.MediaStream([audioTrack]);
+
+          pc.addStream(stream);
+        });
+
+        it('responds with a sendrecv answer to sendrecv', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp})
+          .then(() => {
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            const sections = SDPUtils.splitSections(answer.sdp);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendrecv');
+            done();
+          });
+        });
+
+        it('responds with a sendonly answer to recvonly', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp.replace('sendrecv',
+              'recvonly')})
+          .then(() => {
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            const sections = SDPUtils.splitSections(answer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendonly');
+            done();
+          });
+        });
+      });
+
+      describe('with a local track added after setRemoteDescription', () => {
+        it('responds with a sendrecv answer to sendrecv', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp})
+          .then(() => {
+            const audioTrack = new window.MediaStreamTrack();
+            audioTrack.kind = 'audio';
+            const stream = new window.MediaStream([audioTrack]);
+
+            pc.addStream(stream);
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            const sections = SDPUtils.splitSections(answer.sdp);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendrecv');
+            done();
+          });
+        });
+
+        it('responds with a sendonly answer to recvonly', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp.replace('sendrecv',
+              'recvonly')})
+          .then(() => {
+            const audioTrack = new window.MediaStreamTrack();
+            audioTrack.kind = 'audio';
+            const stream = new window.MediaStream([audioTrack]);
+
+            pc.addStream(stream);
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            const sections = SDPUtils.splitSections(answer.sdp);
+            expect(sections.length).to.equal(2);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('sendonly');
+            done();
+          });
+        });
+      });
+
+      describe('with no local track', () => {
+        it('responds with a recvonly answer to sendrecv', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp})
+          .then(() => {
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            const sections = SDPUtils.splitSections(answer.sdp);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('recvonly');
+            done();
+          });
+        });
+
+        it('responds with an inactive answer to recvonly', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp.replace('sendrecv',
+              'recvonly')})
+          .then(() => {
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            const sections = SDPUtils.splitSections(answer.sdp);
+            expect(SDPUtils.getDirection(sections[1])).to.equal('inactive');
+            done();
+          });
+        });
+      });
+    });
+
+    describe('after a video offer with RTX', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=video 9 UDP/TLS/RTP/SAVPF 102 103\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:video1\r\n' +
+          'a=sendrecv\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:102 vp8/90000\r\n' +
+          'a=rtpmap:103 rtx/90000\r\n' +
+          'a=fmtp:103 apt=102\r\n' +
+          'a=ssrc-group:FID 1001 1002\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n' +
+          'a=ssrc:1002 msid:stream1 track1\r\n' +
+          'a=ssrc:1002 cname:some\r\n';
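+      // a=ssrc-group:FID ties the primary SSRC (1001) to its RTX
+      // retransmission SSRC (1002).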
+      describe('with no local track', () => {
+        it('creates an answer with RTX but no FID group', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp})
+          .then(() => {
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            expect(answer.sdp).to.contain('a=rtpmap:102 vp8');
+            expect(answer.sdp).to.contain('a=rtpmap:103 rtx');
+            expect(answer.sdp).to.contain('a=fmtp:103 apt=102');
+            expect(answer.sdp).not.to.contain('a=ssrc-group:FID');
+            done();
+          });
+        });
+      });
+
+      describe('with a local track', () => {
+        beforeEach(() => {
+          const videoTrack = new window.MediaStreamTrack();
+          videoTrack.kind = 'video';
+          const stream = new window.MediaStream([videoTrack]);
+
+          pc.addStream(stream);
+        });
+        it('creates an answer with RTX', (done) => {
+          pc.setRemoteDescription({type: 'offer', sdp: sdp})
+          .then(() => {
+            return pc.createAnswer();
+          })
+          .then((answer) => {
+            expect(answer.sdp).to.contain('a=rtpmap:102 vp8');
+            expect(answer.sdp).to.contain('a=rtpmap:103 rtx');
+            expect(answer.sdp).to.contain('a=fmtp:103 apt=102');
+            expect(answer.sdp).to.contain('a=ssrc-group:FID 2002 2003');
+            done();
+          });
+        });
+      });
+    });
+
+    describe('after a video offer without RTX', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=video 9 UDP/TLS/RTP/SAVPF 102\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:video1\r\n' +
+          'a=sendrecv\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:102 vp8/90000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n';
+      it('there is no ssrc-group in the answer', (done) => {
+        const videoTrack = new window.MediaStreamTrack();
+        videoTrack.kind = 'video';
+        const stream = new window.MediaStream([videoTrack]);
+
+        pc.addStream(stream);
+
+        pc.setRemoteDescription({type: 'offer', sdp: sdp})
+        .then(() => {
+          return pc.createAnswer();
+        })
+        .then((answer) => {
+          expect(answer.sdp).not.to.contain('a=ssrc-group:FID ');
+          done();
+        });
+      });
+    });
+
+    describe('rtcp-rsize is', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 98\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendrecv\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:98 opus/48000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n';
+
+      it('set if the offer contained rtcp-rsize', (done) => {
+        pc.setRemoteDescription({type: 'offer', sdp: sdp})
+        .then(() => {
+          return pc.createAnswer();
+        })
+        .then((answer) => {
+          expect(answer.sdp).to.contain('a=rtcp-rsize\r\n');
+          done();
+        });
+      });
+
+      it('not set if the offer did not contain rtcp-rsize', (done) => {
+        pc.setRemoteDescription({type: 'offer',
+          sdp: sdp.replace('a=rtcp-rsize\r\n', '')})
+        .then(() => {
+          return pc.createAnswer();
+        })
+        .then((answer) => {
+          expect(answer.sdp).not.to.contain('a=rtcp-rsize\r\n');
+          done();
+        });
+      });
+    });
+  });
+
+  describe('full cycle', () => {
+    let pc1;
+    let pc2;
+    beforeEach(() => {
+      pc1 = new window.RTCPeerConnection();
+      pc2 = new window.RTCPeerConnection();
+    });
+    it('completes a full createOffer-SLD-SRD-createAnswer-SLD-SRD ' +
+       'cycle', (done) => {
+      const audioTrack = new window.MediaStreamTrack();
+      audioTrack.kind = 'audio';
+      const stream = new window.MediaStream([audioTrack]);
+
+      pc1.addStream(stream);
+      pc2.addStream(stream);
+
+      pc1.createOffer()
+      .then((offer) => pc1.setLocalDescription(offer))
+      .then(() => pc2.setRemoteDescription(pc1.localDescription))
+      .then(() => pc2.createAnswer())
+      .then((answer) => pc2.setLocalDescription(answer))
+      .then(() => pc1.setRemoteDescription(pc2.localDescription))
+      .then(() => {
+        expect(pc1.signalingState).to.equal('stable');
+        expect(pc2.signalingState).to.equal('stable');
+        done();
+      });
+    });
+  });
+
+  describe('bundlePolicy', () => {
+    it('creates an offer with a=group:BUNDLE by default', (done) => {
+      const pc = new window.RTCPeerConnection();
+
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then((offer) => {
+        expect(offer.sdp).to.contain('a=group:BUNDLE');
+        done();
+      });
+    });
+
+    it('max-compat creates an offer without a=group:BUNDLE', (done) => {
+      const pc = new window.RTCPeerConnection({bundlePolicy: 'max-compat'});
+
+      pc.createOffer({offerToReceiveAudio: 1})
+      .then((offer) => {
+        expect(offer.sdp).not.to.contain('a=group:BUNDLE');
+        done();
+      });
+    });
+
+    describe('sdp with a=group:BUNDLE and multiple media sections', () => {
+      const sdp = 'v=0\r\no=- 166855176514521964 2 IN IP4 127.0.0.1\r\n' +
+          's=-\r\nt=0 0\r\na=msid-semantic: WMS\r\n' +
+          'a=group:BUNDLE foo\r\n' +
+          'm=audio 9 UDP/TLS/RTP/SAVPF 111\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=candidate:12345 1 UDP 12345 127.0.0.1 1234 typ host ' +
+            'generation 0\r\n' +
+          'a=end-of-candidates\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:audio1\r\n' +
+          'a=sendonly\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:111 opus/48000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n' +
+          'm=video 9 UDP/TLS/RTP/SAVPF 102\r\n' +
+          'c=IN IP4 0.0.0.0\r\n' +
+          'a=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:foo\r\na=ice-pwd:bar\r\n' +
+          'a=candidate:12345 1 UDP 12345 127.0.0.1 1234 typ host ' +
+            'generation 0\r\n' +
+          'a=end-of-candidates\r\n' +
+          'a=fingerprint:sha-256 so:me:co:lo:ns\r\n' +
+          'a=setup:actpass\r\n' +
+          'a=mid:video1\r\n' +
+          'a=sendrecv\r\na=rtcp-mux\r\n' +
+          'a=rtcp-rsize\r\n' +
+          'a=rtpmap:102 vp8/90000\r\n' +
+          'a=ssrc:1001 msid:stream1 track1\r\n' +
+          'a=ssrc:1001 cname:some\r\n';
+      let pc;
+
+      beforeEach(() => {
+        pc = new window.RTCPeerConnection();
+        const audioTrack = new window.MediaStreamTrack();
+        const videoTrack = new window.MediaStreamTrack();
+        audioTrack.kind = 'audio';
+        videoTrack.kind = 'video';
+        const stream = new window.MediaStream([audioTrack, videoTrack]);
+        pc.addTrack(audioTrack, stream);
+        pc.addTrack(videoTrack, stream);
+      });
+
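+      // With a=group:BUNDLE all transceivers share one ICE transport,
+      // so remote candidates must be applied to it exactly once.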
+      function assertSetRemoteCandidatesLimit() {
+        const [firstTransceiver] = pc.transceivers;
+        pc.transceivers.forEach((transceiver) => {
+          expect(transceiver.iceTransport)
+            .to.equal(firstTransceiver.iceTransport);
+        });
+
+        const {iceTransport} = firstTransceiver;
+        expect(iceTransport.setRemoteCandidates).to.have.callCount(1);
+      }
+
+      it('adds remote candidates only once when setting remote offer', () => {
+        return pc.setRemoteDescription({type: 'offer', sdp})
+        .then(assertSetRemoteCandidatesLimit);
+      });
+
+      it('adds remote candidates only once when setting remote answer', () => {
+        return pc.createOffer()
+        .then((offer) => pc.setLocalDescription(offer))
+        .then(() => pc.setRemoteDescription({type: 'answer', sdp}))
+        .then(assertSetRemoteCandidatesLimit);
+      });
+    });
+  });
+
+  describe('negotiationneeded', () => {
+    it('fires asynchronously after addTrack', (done) => {
+      const pc = new window.RTCPeerConnection();
+
+      const audioTrack = new window.MediaStreamTrack();
+      audioTrack.kind = 'audio';
+      const videoTrack = new window.MediaStreamTrack();
+      videoTrack.kind = 'video';
+      const stream = new window.MediaStream([audioTrack, videoTrack]);
+
+      pc.onnegotiationneeded = function(e) {
+        pc.createOffer()
+        .then((offer) => {
+          const sections = SDPUtils.splitSections(offer.sdp);
+          expect(sections.length).to.equal(3);
+          done();
+        });
+      };
+      pc.addTrack(audioTrack, stream); // negotiationneeded must not fire yet,
+      pc.addTrack(videoTrack, stream); // only (asynchronously) after this.
+    });
+  });
+});
diff --git a/test/unit/extractVersion.js b/test/unit/extractVersion.js
new file mode 100644
index 0000000..db55ea2
--- /dev/null
+++ b/test/unit/extractVersion.js
@@ -0,0 +1,167 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+
+describe('extractVersion', () => {
+  const extractVersion = require('../../src/js/utils.js').extractVersion;
+
+  let ua;
+  describe('Chrome regular expression', () => {
+    const expr = /Chrom(e|ium)\/(\d+)\./;
+
+    it('matches Chrome', () => {
+      ua = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like ' +
+          'Gecko) Chrome/45.0.2454.101 Safari/537.36';
+      expect(extractVersion(ua, expr, 2)).to.equal(45);
+    });
+
+    it('matches Chromium', () => {
+      ua = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like ' +
+          'Gecko) Ubuntu Chromium/45.0.2454.85 Chrome/45.0.2454.85 ' +
+          'Safari/537.36';
+      expect(extractVersion(ua, expr, 2)).to.equal(45);
+    });
+
+    it('matches Chrome on Android', () => {
+      ua = 'Mozilla/5.0 (Linux; Android 4.3; Nexus 10 Build/JSS15Q) ' +
+          'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2307.2 ' +
+          'Safari/537.36';
+      expect(extractVersion(ua, expr, 2)).to.equal(42);
+    });
+
+    it('recognizes Opera as Chrome', () => {
+      // Opera, should match chrome/webrtc version 45.0 not Opera 32.0.
+      ua = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, ' +
+          'like Gecko) Chrome/45.0.2454.85 Safari/537.36 OPR/32.0.1948.44';
+      expect(extractVersion(ua, expr, 2)).to.equal(45);
+    });
+
+    it('does not match Firefox', () => {
+      ua = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:44.0) Gecko/20100101 ' +
+          'Firefox/44.0';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+
+    it('does not match Safari', () => {
+      ua = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+          'AppleWebKit/604.1.6 (KHTML, like Gecko) Version/10.2 Safari/604.1.6';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+
+    it('does match Edge (by design, do not use for Edge)', () => {
+      ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ' +
+          '(KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10547';
+      expect(extractVersion(ua, expr, 2)).to.equal(46);
+    });
+
+    it('does not match non-Chrome', () => {
+      ua = 'Mozilla/5.0 (Linux; U; en-us; KFAPWI Build/JDQ39) ' +
+          'AppleWebKit/535.19 KHTML, like Gecko) Silk/3.13 Safari/535.19 ' +
+          'Silk-Accelerated=true';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+
+    it('does not match the iPhone simulator', () => {
+      ua = 'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) ' +
+          'AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0 ' +
+          'Mobile/12A4345d Safari/600.1.4';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+  });
+
+  describe('Firefox regular expression', () => {
+    const expr = /Firefox\/(\d+)\./;
+    it('matches Firefox', () => {
+      ua = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:44.0) Gecko/20100101 ' +
+          'Firefox/44.0';
+      expect(extractVersion(ua, expr, 1)).to.equal(44);
+    });
+
+    it('does not match Chrome', () => {
+      ua = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like ' +
+          'Gecko) Chrome/45.0.2454.101 Safari/537.36';
+      expect(extractVersion(ua, expr, 1)).to.equal(null);
+    });
+
+    it('does not match Safari', () => {
+      ua = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+          'AppleWebKit/604.1.6 (KHTML, like Gecko) Version/10.2 Safari/604.1.6';
+      expect(extractVersion(ua, expr, 1)).to.equal(null);
+    });
+
+    it('does not match Edge', () => {
+      ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ' +
+          '(KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10547';
+      expect(extractVersion(ua, expr, 1)).to.equal(null);
+    });
+  });
+
+  describe('Edge regular expression', () => {
+    const expr = /Edge\/(\d+)\.(\d+)$/;
+    it('matches the Edge build number', () => {
+      ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ' +
+          '(KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10547';
+      expect(extractVersion(ua, expr, 2)).to.equal(10547);
+    });
+
+    it('does not match Chrome', () => {
+      ua = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like ' +
+          'Gecko) Chrome/45.0.2454.101 Safari/537.36';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+
+    it('does not match Firefox', () => {
+      ua = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:44.0) Gecko/20100101 ' +
+          'Firefox/44.0';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+
+    it('does not match Safari', () => {
+      ua = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+          'AppleWebKit/604.1.6 (KHTML, like Gecko) Version/10.2 Safari/604.1.6';
+      expect(extractVersion(ua, expr, 2)).to.equal(null);
+    });
+  });
+
+  describe('Safari regular expression', () => {
+    const expr = /AppleWebKit\/(\d+)/;
+    it('matches the webkit version', () => {
+      ua = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+          'AppleWebKit/604.1.6 (KHTML, like Gecko) Version/10.2 Safari/604.1.6';
+      expect(extractVersion(ua, expr, 1)).to.equal(604);
+    });
+
+    it('matches the iPhone simulator', () => {
+      ua = 'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) ' +
+          'AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0 ' +
+          'Mobile/12A4345d Safari/600.1.4';
+      expect(extractVersion(ua, expr, 1)).to.equal(600);
+    });
+
+    it('matches Chrome (by design, do not use for Chrome)', () => {
+      ua = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like ' +
+          'Gecko) Chrome/45.0.2454.101 Safari/537.36';
+      expect(extractVersion(ua, expr, 1)).to.equal(537);
+    });
+
+    it('matches Edge (by design, do not use for Edge)', () => {
+      ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ' +
+          '(KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10547';
+      expect(extractVersion(ua, expr, 1)).to.equal(537);
+    });
+
+    it('does not match Firefox', () => {
+      ua = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:44.0) Gecko/20100101 ' +
+          'Firefox/44.0';
+      expect(extractVersion(ua, expr, 1)).to.equal(null);
+    });
+  });
+});
+
diff --git a/test/unit/firefox.js b/test/unit/firefox.js
new file mode 100644
index 0000000..c8fd015
--- /dev/null
+++ b/test/unit/firefox.js
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+
+describe('Firefox shim', () => {
+  const shim = require('../../src/js/firefox/firefox_shim');
+  let window;
+
+  beforeEach(() => {
+    window = {
+      mozRTCPeerConnection: function() {},
+      mozRTCSessionDescription: function() {},
+      mozRTCIceCandidate: function() {}
+    };
+  });
+
+  describe('shimPeerConnection', () => {
+    it('creates window.RTCPeerConnection', () => {
+      shim.shimPeerConnection(window);
+      expect(window.RTCPeerConnection).not.to.equal(undefined);
+    });
+
+    it('does not override window.RTCPeerConnection if it exists', () => {
+      const pc = function() {};
+      window.RTCPeerConnection = pc;
+      shim.shimPeerConnection(window);
+      expect(window.RTCPeerConnection).to.equal(pc);
+    });
+  });
+});
diff --git a/test/unit/getusermedia-constraints.js b/test/unit/getusermedia-constraints.js
new file mode 100644
index 0000000..cf75473
--- /dev/null
+++ b/test/unit/getusermedia-constraints.js
@@ -0,0 +1,216 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+const sinon = require('sinon');
+const sinonChai = require('sinon-chai');
+chai.use(sinonChai);
+
+describe('Chrome getUserMedia constraints converter', () => {
+  const shim = require('../../src/js/chrome/getusermedia');
+  let window;
+
+  beforeEach(() => {
+    window = {
+      navigator: {
+        webkitGetUserMedia: sinon.stub(),
+        userAgent: 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) ' +
+            'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.3029.110 ' +
+            'Safari/537.36'
+      }
+    };
+    shim(window);
+  });
+
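+  // The shim back-converts spec-style constraints (plain/ideal/exact)
+  // into Chrome's legacy mandatory/optional format.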
+  it('back-converts spec video constraints', () => {
+    window.navigator.getUserMedia({
+      video: {
+        width: 1280,
+        height: {min: 200, ideal: 720, max: 1080},
+        frameRate: {exact: 50}
+      }
+    });
+    expect(window.navigator.webkitGetUserMedia).to.have.been.calledWith({
+      video: {
+        mandatory: {
+          maxFrameRate: 50,
+          maxHeight: 1080,
+          minHeight: 200,
+          minFrameRate: 50
+        },
+        optional: [
+                {minWidth: 1280},
+                {maxWidth: 1280},
+                {minHeight: 720},
+                {maxHeight: 720}
+        ]
+      }
+    });
+  });
+
+  it('back-converts spec audio constraints', () => {
+    window.navigator.getUserMedia({
+      audio: {
+        autoGainControl: true,
+        echoCancellation: false,
+        noiseSuppression: {exact: false},
+      }
+    });
+    expect(window.navigator.webkitGetUserMedia).to.have.been.calledWith({
+      audio: {
+        mandatory: {
+          googNoiseSuppression: false
+        },
+        optional: [
+          {echoCancellation: false},
+          {googAutoGainControl: true},
+        ]
+      }
+    });
+  });
+
+  it('passes legacy video constraints through', () => {
+    const legacy = {
+      video: {
+        mandatory: {
+          maxFrameRate: 50,
+          maxHeight: 1080,
+          minHeight: 200,
+          minFrameRate: 50
+        },
+        optional: [
+          {minWidth: 1280},
+          {maxWidth: 1280},
+          {minHeight: 720},
+          {maxHeight: 720}
+        ]
+      }
+    };
+    window.navigator.getUserMedia(legacy);
+    expect(window.navigator.webkitGetUserMedia).to.have.been.calledWith(legacy);
+  });
+
+  it('passes legacy audio constraints through', () => {
+    const legacy = {
+      audio: {
+        mandatory: {
+          googNoiseSuppression: false
+        },
+        optional: [
+          {echoCancellation: false},
+          {googAutoGainControl: true},
+        ]
+      }
+    };
+    window.navigator.getUserMedia(legacy);
+    expect(window.navigator.webkitGetUserMedia).to.have.been.calledWith(legacy);
+  });
+
+  it('does not choke on common unknown constraints', () => {
+    window.navigator.getUserMedia({
+      video: {
+        mediaSource: 'screen',
+        advanced: [
+          {facingMode: 'user'}
+        ],
+        require: ['height', 'frameRate']
+      }
+    });
+    expect(window.navigator.webkitGetUserMedia).to.have.been.calledWith({
+      video: {
+        optional: [
+          {facingMode: 'user'}
+        ]
+      }
+    });
+  });
+});
+
+describe('Firefox getUserMedia constraints converter', () => {
+  const shim = require('../../src/js/firefox/getusermedia');
+  let window;
+
+  beforeEach(() => {
+    window = {
+      navigator: {
+        mozGetUserMedia: sinon.stub()
+      }
+    };
+  });
+
+  describe('in Firefox 37', () => {
+    beforeEach(() => {
+      window.navigator.userAgent = 'Mozilla/5.0 (Macintosh; Intel ' +
+          'Mac OS X 10.12; rv:37.0) Gecko/20100101 Firefox/37.0';
+      shim(window);
+    });
+
+    it('converts spec-constraints to legacy constraints', () => {
+      window.navigator.getUserMedia({
+        video: {
+          mediaSource: 'screen',
+          width: 1280,
+          height: {min: 200, ideal: 720, max: 1080},
+          facingMode: 'user',
+          frameRate: {exact: 50}
+        }
+      });
+      expect(window.navigator.mozGetUserMedia).to.have.been.calledWith({
+        video: {
+          mediaSource: 'screen',
+          height: {min: 200, max: 1080},
+          frameRate: {max: 50, min: 50},
+          advanced: [
+            {width: {min: 1280, max: 1280}},
+            {height: {min: 720, max: 720}},
+            {facingMode: 'user'}
+          ],
+          require: ['height', 'frameRate']
+        }
+      });
+    });
+
+    it('passes legacy constraints through', () => {
+      const legacy = {
+        video: {
+          height: {min: 200, max: 1080},
+          frameRate: {max: 50, min: 50},
+          advanced: [
+            {width: {min: 1280, max: 1280}},
+            {height: {min: 720, max: 720}},
+            {facingMode: 'user'}
+          ],
+          require: ['height', 'frameRate']
+        }
+      };
+      window.navigator.getUserMedia(legacy);
+      expect(window.navigator.mozGetUserMedia).to.have.been.calledWith(legacy);
+    });
+  });
+
+  describe('in Firefox 38+', () => {
+    beforeEach(() => {
+      window.navigator.userAgent = 'Mozilla/5.0 (Macintosh; Intel ' +
+          'Mac OS X 10.12; rv:38.0) Gecko/20100101 Firefox/38.0';
+      shim(window);
+    });
+    it('passes spec constraints through', () => {
+      const spec = {
+        video: {
+          mediaSource: 'screen',
+          width: 1280,
+          height: {min: 200, ideal: 720, max: 1080},
+          facingMode: 'user',
+          frameRate: {exact: 50}
+        }
+      };
+      window.navigator.getUserMedia(spec);
+      expect(window.navigator.mozGetUserMedia).to.have.been.calledWith(spec);
+    });
+  });
+});
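
Read together, the Chrome expectations above describe a back-conversion from spec constraints to the legacy mandatory/optional form, while the Firefox 37 case rewrites them into plain min/max values plus an advanced list. A hedged sketch of the numeric part of the Chrome mapping, inferred only from the assertions above (the shipped src/js/chrome/getusermedia.js also renames audio constraints, handles plain boolean values, deviceId and more; the function name here is made up for illustration):

    // min/max/exact go into `mandatory` as min<Name>/max<Name>;
    // a bare number or `ideal` becomes a min/max pair of `optional` entries.
    function backConvertNumeric(name, value, mandatory, optional) {
      var cap = name.charAt(0).toUpperCase() + name.slice(1);
      if (typeof value === 'number') {
        value = {ideal: value};
      }
      if (value.exact !== undefined) {
        mandatory['min' + cap] = value.exact;
        mandatory['max' + cap] = value.exact;
      }
      if (value.min !== undefined) {
        mandatory['min' + cap] = value.min;
      }
      if (value.max !== undefined) {
        mandatory['max' + cap] = value.max;
      }
      if (value.ideal !== undefined) {
        var minEntry = {};
        minEntry['min' + cap] = value.ideal;
        var maxEntry = {};
        maxEntry['max' + cap] = value.ideal;
        optional.push(minEntry, maxEntry);
      }
    }

Applied per key to the first test's video object, this reproduces the exact mandatory/optional shape asserted there.
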
diff --git a/test/unit/logSuppression.js b/test/unit/logSuppression.js
new file mode 100644
index 0000000..edd9448
--- /dev/null
+++ b/test/unit/logSuppression.js
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+
+describe('Log suppression', () => {
+  const utils = require('../../src/js/utils.js');
+  const saveConsole = console.log.bind(console);
+
+  let logCount;
+  beforeEach(() => {
+    logCount = 0;
+    console.log = function() {
+      if (arguments.length === 1 && arguments[0] === 'test') {
+        logCount++;
+      } else {
+        saveConsole.apply(saveConsole, arguments);
+      }
+    };
+    global.window = {};
+    require('../../out/adapter.js');
+  });
+
+  afterEach(() => {
+    console.log = saveConsole;
+    delete global.window;
+  });
+
+  it('does not call console.log by default', () => {
+    utils.log('test');
+    expect(logCount).to.equal(0);
+  });
+  it('does call console.log when enabled', () => {
+    utils.disableLog(false);
+    utils.log('test');
+    expect(logCount).to.equal(1);
+  });
+});
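
This suite pins down the logging switch in utils.js: utils.log() must stay silent by default and only reach console.log after utils.disableLog(false) has been called. A minimal sketch of the pattern being verified, as implied by the assertions above (the flag name and the window check are assumptions, not a quote of the shipped utils.js):

    var logDisabled_ = true;            // silent by default, per the first test
    function disableLog(bool) {
      logDisabled_ = bool;              // disableLog(false) re-enables logging
    }
    function log() {
      // Only log in a browser-like environment (the suite sets global.window = {})
      // and only once logging has been explicitly enabled.
      if (typeof window === 'object' && !logDisabled_ &&
          typeof console !== 'undefined' && typeof console.log === 'function') {
        console.log.apply(console, arguments);
      }
    }
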
diff --git a/test/unit/safari.js b/test/unit/safari.js
new file mode 100644
index 0000000..e2c40d1
--- /dev/null
+++ b/test/unit/safari.js
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* eslint-env node */
+const chai = require('chai');
+const expect = chai.expect;
+const sinon = require('sinon');
+const sinonChai = require('sinon-chai');
+chai.use(sinonChai);
+
+describe('Safari shim', () => {
+  const shim = require('../../src/js/safari/safari_shim');
+  let window;
+
+  beforeEach(() => {
+    window = {
+      RTCPeerConnection: sinon.stub()
+    };
+  });
+
+  describe('shimStreamsAPI', () => {
+    beforeEach(() => {
+      window.RTCPeerConnection.prototype.addTrack = sinon.stub();
+      shim.shimLocalStreamsAPI(window);
+      shim.shimRemoteStreamsAPI(window);
+    });
+
+    it('shimStreamsAPI existence', () => {
+      const prototype = window.RTCPeerConnection.prototype;
+      expect(prototype.addTrack.length).to.equal(2);
+      expect(prototype.addStream.length).to.equal(1);
+      expect(prototype.removeStream.length).to.equal(1);
+      expect(prototype.getLocalStreams.length).to.equal(0);
+      expect(prototype.getStreamById.length).to.equal(1);
+      expect(prototype.getRemoteStreams.length).to.equal(0);
+    });
+    it('local streams API', () => {
+      const pc = new window.RTCPeerConnection();
+      pc.getSenders = () => {
+        return [];
+      };
+      const stream = {id: 'id1', getTracks: () => {
+        return [];
+      }};
+      expect(pc.getStreamById(stream.id)).to.equal(null);
+      expect(pc.getLocalStreams().length).to.equal(0);
+      expect(pc.getRemoteStreams().length).to.equal(0);
+
+      pc.addStream(stream);
+      expect(pc.getStreamById(stream.id)).to.equal(stream);
+      expect(pc.getLocalStreams()[0]).to.equal(stream);
+      expect(pc.getRemoteStreams().length).to.equal(0);
+
+      const stream2 = {id: 'id2', getTracks: stream.getTracks};
+      pc.removeStream(stream2);
+      expect(pc.getStreamById(stream.id)).to.equal(stream);
+      expect(pc.getLocalStreams()[0]).to.equal(stream);
+
+      pc.addTrack({}, stream2);
+      expect(pc.getStreamById(stream.id)).to.equal(stream);
+      expect(pc.getStreamById(stream2.id)).to.equal(stream2);
+      expect(pc.getLocalStreams().length).to.equal(2);
+      expect(pc.getLocalStreams()[0]).to.equal(stream);
+      expect(pc.getLocalStreams()[1]).to.equal(stream2);
+
+      pc.removeStream(stream2);
+      expect(pc.getStreamById(stream.id)).to.equal(stream);
+      expect(pc.getLocalStreams().length).to.equal(1);
+      expect(pc.getLocalStreams()[0]).to.equal(stream);
+
+      pc.removeStream(stream);
+      expect(pc.getStreamById(stream.id)).to.equal(null);
+      expect(pc.getLocalStreams().length).to.equal(0);
+    });
+  });
+
+  describe('shimCallbacksAPI', () => {
+    it('shimCallbacksAPI existence', () => {
+      shim.shimCallbacksAPI(window);
+      const prototype = window.RTCPeerConnection.prototype;
+      expect(prototype.createOffer.length).to.equal(2);
+      expect(prototype.createAnswer.length).to.equal(2);
+      expect(prototype.setLocalDescription.length).to.equal(3);
+      expect(prototype.setRemoteDescription.length).to.equal(3);
+      expect(prototype.addIceCandidate.length).to.equal(3);
+    });
+  });
+
+  ['createOffer', 'createAnswer'].forEach((method) => {
+    describe('legacy ' + method + ' shim', () => {
+      describe('options passing with', () => {
+        let stub;
+        beforeEach(() => {
+          stub = sinon.stub();
+          window.RTCPeerConnection.prototype[method] = stub;
+          shim.shimCallbacksAPI(window);
+        });
+
+        it('no arguments', () => {
+          const pc = new window.RTCPeerConnection();
+          pc[method]();
+          expect(stub).to.have.been.calledWith(undefined);
+        });
+
+        it('two callbacks', () => {
+          const pc = new window.RTCPeerConnection();
+          pc[method](null, null);
+          expect(stub).to.have.been.calledWith(undefined);
+        });
+
+        it('a non-function first argument', () => {
+          const pc = new window.RTCPeerConnection();
+          pc[method](1);
+          expect(stub).to.have.been.calledWith(1);
+        });
+
+        it('two callbacks and options', () => {
+          const pc = new window.RTCPeerConnection();
+          pc[method](null, null, 1);
+          expect(stub).to.have.been.calledWith(1);
+        });
+
+        it('two callbacks and two additional arguments', () => {
+          const pc = new window.RTCPeerConnection();
+          pc[method](null, null, 1, 2);
+          expect(stub).to.have.been.calledWith(1);
+        });
+      });
+    });
+  });
+  describe('conversion of RTCIceServer.url', () => {
+    let nativeStub;
+    beforeEach(() => {
+      nativeStub = window.RTCPeerConnection;
+      shim.shimRTCIceServerUrls(window);
+    });
+
+    const stunURL = 'stun:stun.l.google.com:19302';
+    const url = {url: stunURL};
+    const urlArray = {url: [stunURL]};
+    const urls = {urls: stunURL};
+    const urlsArray = {urls: [stunURL]};
+
+    describe('does not modify RTCIceServer.urls', () => {
+      it('for strings', () => {
+        new window.RTCPeerConnection({iceServers: [urls]});
+        expect(nativeStub).to.have.been.calledWith(sinon.match({
+          iceServers: sinon.match([
+            sinon.match(urls)
+          ])
+        }));
+      });
+
+      it('for arrays', () => {
+        new window.RTCPeerConnection({iceServers: [urlsArray]});
+        expect(nativeStub).to.have.been.calledWith(sinon.match({
+          iceServers: sinon.match([
+            sinon.match(urlsArray)
+          ])
+        }));
+      });
+    });
+
+    describe('transforms RTCIceServer.url to RTCIceServer.urls', () => {
+      it('for strings', () => {
+        new window.RTCPeerConnection({iceServers: [url]});
+        expect(nativeStub).to.have.been.calledWith(sinon.match({
+          iceServers: sinon.match([
+            sinon.match(urls)
+          ])
+        }));
+      });
+
+      it('for arrays', () => {
+        new window.RTCPeerConnection({iceServers: [urlArray]});
+        expect(nativeStub).to.have.been.calledWith(sinon.match({
+          iceServers: sinon.match([
+            sinon.match(urlsArray)
+          ])
+        }));
+      });
+    });
+  });
+});
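
The final describe block fixes the contract of shimRTCIceServerUrls: the shim wraps window.RTCPeerConnection so that a legacy RTCIceServer.url entry reaches the native constructor as urls, while configurations that already use urls are forwarded untouched. A hedged sketch of just that normalisation step, inferred from the assertions above (illustration only; the constructor wrapping itself is omitted):

    function normaliseIceServers(iceServers) {
      return (iceServers || []).map(function(server) {
        if (server && server.url && !server.urls) {
          server = Object.assign({}, server);  // do not mutate the caller's config
          server.urls = server.url;            // string or array, copied as-is
          delete server.url;
        }
        return server;
      });
    }
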

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/libjs-webrtc-adapter.git


